commit 9c20dd40921b48ed83a3a18bb6ac560e82ac2a04 Author: Sienna Meridian Satterwhite Date: Sun Feb 8 13:24:35 2026 +0000 feat: implement storybook DSL with template composition and validation Add complete domain-specific language for authoring narrative content for agent simulations. Features: - Complete parser using LALRPOP + logos lexer - Template composition (includes + multiple inheritance) - Strict mode validation for templates - Reserved keyword protection - Semantic validators (trait ranges, schedule overlaps, life arcs, behaviors) - Name resolution and cross-reference tracking - CLI tool (validate, inspect, query commands) - Query API with filtering - 260 comprehensive tests (unit, integration, property-based) Implementation phases: - Phase 1 (Parser): Complete - Phase 2 (Resolution + Validation): Complete - Phase 3 (Public API + CLI): Complete BREAKING CHANGE: Initial implementation diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..46b5d68 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +/target +.envrc diff --git a/.serena/.gitignore b/.serena/.gitignore new file mode 100644 index 0000000..14d86ad --- /dev/null +++ b/.serena/.gitignore @@ -0,0 +1 @@ +/cache diff --git a/.serena/project.yml b/.serena/project.yml new file mode 100644 index 0000000..7f6618c --- /dev/null +++ b/.serena/project.yml @@ -0,0 +1,112 @@ +# the name by which the project can be referenced within Serena +project_name: "storybook" + + +# list of languages for which language servers are started; choose from: +# al bash clojure cpp csharp +# csharp_omnisharp dart elixir elm erlang +# fortran fsharp go groovy haskell +# java julia kotlin lua markdown +# matlab nix pascal perl php +# powershell python python_jedi r rego +# ruby ruby_solargraph rust scala swift +# terraform toml typescript typescript_vts vue +# yaml zig +# (This list may be outdated. 
For the current list, see values of Language enum here: +# https://github.com/oraios/serena/blob/main/src/solidlsp/ls_config.py +# For some languages, there are alternative language servers, e.g. csharp_omnisharp, ruby_solargraph.) +# Note: +# - For C, use cpp +# - For JavaScript, use typescript +# - For Free Pascal/Lazarus, use pascal +# Special requirements: +# Some languages require additional setup/installations. +# See here for details: https://oraios.github.io/serena/01-about/020_programming-languages.html#language-servers +# When using multiple languages, the first language server that supports a given file will be used for that file. +# The first language is the default language and the respective language server will be used as a fallback. +# Note that when using the JetBrains backend, language servers are not used and this list is correspondingly ignored. +languages: +- rust + +# the encoding used by text files in the project +# For a list of possible encodings, see https://docs.python.org/3.11/library/codecs.html#standard-encodings +encoding: "utf-8" + +# whether to use project's .gitignore files to ignore files +ignore_all_files_in_gitignore: true + +# list of additional paths to ignore in all projects +# same syntax as gitignore, so you can use * and ** +ignored_paths: [] + +# whether the project is in read-only mode +# If set to true, all editing tools will be disabled and attempts to use them will result in an error +# Added on 2025-04-18 +read_only: false + +# list of tool names to exclude. We recommend not excluding any tools, see the readme for more details. +# Below is the complete list of tools for convenience. +# To make sure you have the latest list of tools, and to view their descriptions, +# execute `uv run scripts/print_tool_overview.py`. +# +# * `activate_project`: Activates a project by name. +# * `check_onboarding_performed`: Checks whether project onboarding was already performed. 
+# * `create_text_file`: Creates/overwrites a file in the project directory. +# * `delete_lines`: Deletes a range of lines within a file. +# * `delete_memory`: Deletes a memory from Serena's project-specific memory store. +# * `execute_shell_command`: Executes a shell command. +# * `find_referencing_code_snippets`: Finds code snippets in which the symbol at the given location is referenced. +# * `find_referencing_symbols`: Finds symbols that reference the symbol at the given location (optionally filtered by type). +# * `find_symbol`: Performs a global (or local) search for symbols with/containing a given name/substring (optionally filtered by type). +# * `get_current_config`: Prints the current configuration of the agent, including the active and available projects, tools, contexts, and modes. +# * `get_symbols_overview`: Gets an overview of the top-level symbols defined in a given file. +# * `initial_instructions`: Gets the initial instructions for the current project. +# Should only be used in settings where the system prompt cannot be set, +# e.g. in clients you have no control over, like Claude Desktop. +# * `insert_after_symbol`: Inserts content after the end of the definition of a given symbol. +# * `insert_at_line`: Inserts content at a given line in a file. +# * `insert_before_symbol`: Inserts content before the beginning of the definition of a given symbol. +# * `list_dir`: Lists files and directories in the given directory (optionally with recursion). +# * `list_memories`: Lists memories in Serena's project-specific memory store. +# * `onboarding`: Performs onboarding (identifying the project structure and essential tasks, e.g. for testing or building). +# * `prepare_for_new_conversation`: Provides instructions for preparing for a new conversation (in order to continue with the necessary context). +# * `read_file`: Reads a file within the project directory. 
+# * `read_memory`: Reads the memory with the given name from Serena's project-specific memory store. +# * `remove_project`: Removes a project from the Serena configuration. +# * `replace_lines`: Replaces a range of lines within a file with new content. +# * `replace_symbol_body`: Replaces the full definition of a symbol. +# * `restart_language_server`: Restarts the language server, may be necessary when edits not through Serena happen. +# * `search_for_pattern`: Performs a search for a pattern in the project. +# * `summarize_changes`: Provides instructions for summarizing the changes made to the codebase. +# * `switch_modes`: Activates modes by providing a list of their names +# * `think_about_collected_information`: Thinking tool for pondering the completeness of collected information. +# * `think_about_task_adherence`: Thinking tool for determining whether the agent is still on track with the current task. +# * `think_about_whether_you_are_done`: Thinking tool for determining whether the task is truly completed. +# * `write_memory`: Writes a named memory (for future reference) to Serena's project-specific memory store. +excluded_tools: [] + +# list of tools to include that would otherwise be disabled (particularly optional tools that are disabled by default) +included_optional_tools: [] + +# fixed set of tools to use as the base tool set (if non-empty), replacing Serena's default set of tools. +# This cannot be combined with non-empty excluded_tools or included_optional_tools. +fixed_tools: [] + +# list of mode names to that are always to be included in the set of active modes +# The full set of modes to be activated is base_modes + default_modes. +# If the setting is undefined, the base_modes from the global configuration (serena_config.yml) apply. +# Otherwise, this setting overrides the global configuration. +# Set this to [] to disable base modes for this project. +# Set this to a list of mode names to always include the respective modes for this project. 
+base_modes: + +# list of mode names that are to be activated by default. +# The full set of modes to be activated is base_modes + default_modes. +# If the setting is undefined, the default_modes from the global configuration (serena_config.yml) apply. +# Otherwise, this overrides the setting from the global configuration (serena_config.yml). +# This setting can, in turn, be overridden by CLI parameters (--mode). +default_modes: + +# initial prompt for the project. It will always be given to the LLM upon activating the project +# (contrary to the memories, which are loaded on demand). +initial_prompt: "" diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..3f70b19 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,1639 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "addr2line" +version = "0.25.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b5d307320b3181d6d7954e663bd7c774a838b8220fe0593c86d9fb09f498b4b" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "aho-corasick" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" +dependencies = [ + "memchr", +] + +[[package]] +name = "anstream" +version = "0.6.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" + +[[package]] +name = "anstyle-parse" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" +dependencies = [ + "anstyle", + "once_cell_polyfill", + "windows-sys 0.61.2", +] + +[[package]] +name = "anyhow" +version = "1.0.101" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea" + +[[package]] +name = "ascii-canvas" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8824ecca2e851cec16968d54a01dd372ef8f95b244fb84b84e70128be347c3c6" +dependencies = [ + "term", +] + +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "backtrace" +version = "0.3.76" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb531853791a215d7c62a30daf0dde835f381ab5de4589cfe7c649d2cbe92bd6" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", + "windows-link", +] + +[[package]] +name = "backtrace-ext" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "537beee3be4a18fb023b570f80e3ae28003db9167a751266b259926e25539d50" 
+dependencies = [ + "backtrace", +] + +[[package]] +name = "beef" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" + +[[package]] +name = "bit-set" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0481a0e032742109b1133a095184ee93d88f3dc9e0d28a5d033dc77a073f44f" +dependencies = [ + "bit-vec 0.7.0", +] + +[[package]] +name = "bit-set" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" +dependencies = [ + "bit-vec 0.8.0", +] + +[[package]] +name = "bit-vec" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2c54ff287cfc0a34f38a6b832ea1bd8e448a330b3e40a50859e6488bee07f22" + +[[package]] +name = "bit-vec" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "clap" +version = "4.5.57" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6899ea499e3fb9305a65d5ebf6e3d2248c5fab291f300ad0a704fbe142eae31a" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b12c8b680195a62a8364d16b8447b01b6c2c8f9aaf68bee653be34d4245e238" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.5.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a92793da1a46a5f2a02a6f4c46c6496b28c43638adea8306fcb0caa1634f24e5" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "clap_lex" +version = "0.7.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3e64b0cc0439b12df2fa678eae89a1c56a529fd067a9115f7827f1fffd22b32" + +[[package]] +name = "colorchoice" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" + +[[package]] +name = "console" +version = "0.15.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8" +dependencies = [ + "encode_unicode", + "libc", + "once_cell", + "windows-sys 0.59.0", +] + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crypto-common" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", +] + +[[package]] +name = "dirs-next" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" +dependencies = [ + "cfg-if", + "dirs-sys-next", +] + +[[package]] +name = "dirs-sys-next" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" + +[[package]] +name = "ena" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d248bdd43ce613d87415282f69b9bb99d947d290b10962dd6c56233312c2ad5" +dependencies = [ + "log", +] + +[[package]] +name = "encode_unicode" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] 
+name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "filetime" +version = "0.2.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f98844151eee8917efc50bd9e8318cb963ae8b297431495d3f758616ea5c57db" +dependencies = [ + "cfg-if", + "libc", + "libredox", +] + +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "fsevent-sys" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2" +dependencies = [ + "libc", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "getrandom" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasip2", +] + +[[package]] +name = "gimli" +version = "0.32.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "indexmap" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] +name = "inotify" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8069d3ec154eb856955c1c0fbffefbf5f3c40a104ec912d4797314c1801abff" +dependencies = [ + "bitflags 1.3.2", + "inotify-sys", + "libc", +] + +[[package]] +name = "inotify-sys" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" +dependencies = [ + "libc", +] + +[[package]] +name = "insta" +version = "1.46.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e82db8c87c7f1ccecb34ce0c24399b8a73081427f3c7c50a5d597925356115e4" +dependencies = [ + "console", + "once_cell", + "similar", + "tempfile", +] + +[[package]] +name = "is_ci" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45" + +[[package]] +name = "is_terminal_polyfill" +version = 
"1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" + +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + +[[package]] +name = "keccak" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" +dependencies = [ + "cpufeatures", +] + +[[package]] +name = "kqueue" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a" +dependencies = [ + "kqueue-sys", + "libc", +] + +[[package]] +name = "kqueue-sys" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b" +dependencies = [ + "bitflags 1.3.2", + "libc", +] + +[[package]] +name = "lalrpop" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e56f323e2d610628d1f5bdd39168a774674ac7989ed67011963bb3f71edd797" +dependencies = [ + "ascii-canvas", + "bit-set 0.6.0", + "ena", + "itertools", + "lalrpop-util", + "petgraph", + "pico-args", + "regex", + "regex-syntax", + "sha3", + "string_cache", + "term", + "unicode-xid", + "walkdir", +] + +[[package]] +name = "lalrpop-util" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "108dc8f5dabad92c65a03523055577d847f5dcc00f3e7d3a68bc4d48e01d8fe1" +dependencies = [ + "regex-automata", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] 
+name = "libc" +version = "0.2.180" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" + +[[package]] +name = "libredox" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616" +dependencies = [ + "bitflags 2.10.0", + "libc", + "redox_syscall 0.7.0", +] + +[[package]] +name = "linux-raw-sys" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" + +[[package]] +name = "lock_api" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "logos" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7251356ef8cb7aec833ddf598c6cb24d17b689d20b993f9d11a3d764e34e6458" +dependencies = [ + "logos-derive", +] + +[[package]] +name = "logos-codegen" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59f80069600c0d66734f5ff52cc42f2dabd6b29d205f333d61fd7832e9e9963f" +dependencies = [ + "beef", + "fnv", + "lazy_static", + "proc-macro2", + "quote", + "regex-syntax", + "syn", +] + +[[package]] +name = "logos-derive" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24fb722b06a9dc12adb0963ed585f19fc61dc5413e6a9be9422ef92c091e731d" +dependencies = [ + "logos-codegen", +] + +[[package]] +name = "memchr" +version = "2.8.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "miette" +version = "7.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f98efec8807c63c752b5bd61f862c165c115b0a35685bdcfd9238c7aeb592b7" +dependencies = [ + "backtrace", + "backtrace-ext", + "cfg-if", + "miette-derive", + "owo-colors", + "supports-color", + "supports-hyperlinks", + "supports-unicode", + "terminal_size", + "textwrap", + "unicode-width 0.1.14", +] + +[[package]] +name = "miette-derive" +version = "7.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db5b29714e950dbb20d5e6f74f9dcec4edbcc1067bb7f8ed198c097b8c1a818b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", +] + +[[package]] +name = "mio" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +dependencies = [ + "libc", + "log", + "wasi", + "windows-sys 0.48.0", +] + +[[package]] +name = "new_debug_unreachable" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" + +[[package]] +name = "notify" +version = "6.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6205bd8bb1e454ad2e27422015fb5e4f2bcc7e08fa8f27058670d208324a4d2d" +dependencies = [ + "bitflags 2.10.0", + "crossbeam-channel", + "filetime", + "fsevent-sys", + "inotify", + "kqueue", + "libc", + "log", + "mio", + "walkdir", + "windows-sys 0.48.0", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "object" +version = "0.37.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "once_cell_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" + +[[package]] +name = "owo-colors" +version = "4.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c6901729fa79e91a0913333229e9ca5dc725089d1c363b2f4b4760709dc4a52" + +[[package]] +name = "parking_lot" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall 0.5.18", + "smallvec", + "windows-link", +] + +[[package]] +name = "petgraph" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" +dependencies = [ + "fixedbitset", + "indexmap", +] + +[[package]] +name = "phf_shared" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" +dependencies = [ + "siphasher", +] + +[[package]] +name = "pico-args" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5be167a7af36ee22fe3115051bc51f6e6c7054c9348e28deb4f49bd6f705a315" + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "precomputed-hash" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "proptest" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37566cb3fdacef14c0737f9546df7cfeadbfbc9fef10991038bf5015d0c80532" +dependencies = [ + "bit-set 0.8.0", + "bit-vec 0.8.0", + "bitflags 2.10.0", + "num-traits", + "rand", + "rand_chacha", + "rand_xorshift", + "regex-syntax", + "rusty-fork", + "tempfile", + "unarray", +] + +[[package]] +name = "quick-error" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" + +[[package]] +name = "quote" +version = "1.0.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c" +dependencies = [ + "getrandom 0.3.4", +] + +[[package]] +name = "rand_xorshift" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a" +dependencies = [ + "rand_core", +] + +[[package]] +name = "redox_syscall" +version = "0.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" +dependencies = [ + "bitflags 2.10.0", +] + +[[package]] +name = "redox_syscall" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f3fe0889e69e2ae9e41f4d6c4c0181701d00e4697b356fb1f74173a5e0ee27" +dependencies = [ + "bitflags 2.10.0", +] + +[[package]] +name = "redox_users" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" +dependencies = [ + "getrandom 0.2.17", + "libredox", + "thiserror", +] + +[[package]] +name = "regex" +version = "1.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a96887878f22d7bad8a3b6dc5b7440e0ada9a245242924394987b21cf2210a4c" + +[[package]] +name = "rustc-demangle" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b50b8869d9fc858ce7266cce0194bd74df58b9d0e3f6df3a9fc8eb470d95c09d" + +[[package]] +name = "rustix" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" +dependencies = [ + "bitflags 2.10.0", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.61.2", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "rusty-fork" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc6bf79ff24e648f6da1f8d1f011e9cac26491b619e6b9280f2b47f1774e6ee2" +dependencies = [ + "fnv", + "quick-error", + "tempfile", + "wait-timeout", +] + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_spanned" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", +] + +[[package]] +name = "sha3" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" +dependencies = [ + "digest", + "keccak", +] + +[[package]] +name = "similar" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" + +[[package]] +name = "siphasher" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "storybook" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap", + "indexmap", 
+ "insta", + "lalrpop", + "lalrpop-util", + "logos", + "miette", + "notify", + "petgraph", + "proptest", + "serde", + "strsim", + "tempfile", + "thiserror", + "toml", + "walkdir", +] + +[[package]] +name = "string_cache" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf776ba3fa74f83bf4b63c3dcbbf82173db2632ed8452cb2d891d33f459de70f" +dependencies = [ + "new_debug_unreachable", + "parking_lot", + "phf_shared", + "precomputed-hash", +] + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "supports-color" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c64fc7232dd8d2e4ac5ce4ef302b1d81e0b80d055b9d77c7c4f51f6aa4c867d6" +dependencies = [ + "is_ci", +] + +[[package]] +name = "supports-hyperlinks" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e396b6523b11ccb83120b115a0b7366de372751aa6edf19844dfb13a6af97e91" + +[[package]] +name = "supports-unicode" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7401a30af6cb5818bb64852270bb722533397edcfc7344954a38f420819ece2" + +[[package]] +name = "syn" +version = "2.0.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "tempfile" +version = "3.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" +dependencies = [ + "fastrand", + "getrandom 0.3.4", + "once_cell", + "rustix", + "windows-sys 0.61.2", +] + +[[package]] +name = "term" +version = "0.7.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" +dependencies = [ + "dirs-next", + "rustversion", + "winapi", +] + +[[package]] +name = "terminal_size" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60b8cb979cb11c32ce1603f8137b22262a9d131aaa5c37b5678025f22b8becd0" +dependencies = [ + "rustix", + "windows-sys 0.60.2", +] + +[[package]] +name = "textwrap" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c13547615a44dc9c452a8a534638acdf07120d4b6847c8178705da06306a3057" +dependencies = [ + "unicode-linebreak", + "unicode-width 0.2.2", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "toml" +version = "0.8.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.22.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +dependencies = [ + "indexmap", + "serde", + 
"serde_spanned", + "toml_datetime", + "toml_write", + "winnow", +] + +[[package]] +name = "toml_write" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" + +[[package]] +name = "typenum" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" + +[[package]] +name = "unarray" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" + +[[package]] +name = "unicode-ident" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" + +[[package]] +name = "unicode-linebreak" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" + +[[package]] +name = "unicode-width" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" + +[[package]] +name = "unicode-width" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "version_check" +version = "0.9.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "wait-timeout" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11" +dependencies = [ + "libc", +] + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasip2" +version = "1.0.2+wasi-0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.5", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 
0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" +dependencies = [ + "windows-link", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" + +[[package]] 
+name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_i686_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" + +[[package]] +name = "winnow" +version = "0.7.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" +dependencies = [ + "memchr", +] + +[[package]] +name = "wit-bindgen" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" + 
+[[package]] +name = "zerocopy" +version = "0.8.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db6d35d663eadb6c932438e763b262fe1a70987f9ae936e60158176d710cae4a" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4122cd3169e94605190e77839c9a40d40ed048d305bfdc146e7df40ab0f3e517" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..2272c99 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,36 @@ +[package] +name = "storybook" +version = "0.1.0" +edition = "2021" + +[lib] +name = "storybook" +path = "src/lib.rs" + +[[bin]] +name = "sb" +path = "src/bin/sb.rs" + +[dependencies] +logos = "0.14" +lalrpop-util = "0.21" +miette = { version = "7.0", features = ["fancy"] } +thiserror = "1.0" +strsim = "0.11" # Fuzzy matching for "did you mean?" suggestions +indexmap = "2.0" # Order-preserving maps +petgraph = "0.6" # Cycle detection +# Phase 3: Public API + CLI +anyhow = "1.0" # Error handling +clap = { version = "4.5", features = ["derive"] } +notify = "6.0" # Filesystem watching +toml = "0.8" # storybook.toml parsing +walkdir = "2.4" # Directory traversal +serde = { version = "1.0", features = ["derive"] } + +[build-dependencies] +lalrpop = "0.21" + +[dev-dependencies] +proptest = "1.4" +insta = "1.34" +tempfile = "3.8" # Temporary directories for integration tests diff --git a/build.rs b/build.rs new file mode 100644 index 0000000..e99fbd9 --- /dev/null +++ b/build.rs @@ -0,0 +1,7 @@ +fn main() { + lalrpop::Configuration::new() + .use_cargo_dir_conventions() + .emit_rerun_directives(true) + .process_current_dir() + .unwrap(); +} diff --git a/design.md b/design.md new file mode 100644 index 0000000..67fe3db --- /dev/null +++ b/design.md @@ -0,0 +1,1306 @@ +# Storybook DSL — Design Plan + +**Status:** Proposal +**Author:** Sienna + Lonni 
+**Date:** February 2026 +**Scope:** Content authoring language, parser, resolver, tooling for Aspen agent simulation + +--- + +## 1. What This Document Covers + +This plan describes **Storybook** (`.sb`), a domain-specific language for authoring the content that drives Aspen's agent simulation: characters, life arcs, behavior trees, schedules, institutions, relationships, conditions, and the prose that gives them life. It covers the language design, the cross-referencing system, the implementation architecture, key design decisions and their rationale, and a phased build plan. + +The primary user is Lonni, who is creative, intelligent, not a developer, and needs to build out an entire village's worth of characters, relationships, and narrative structure. The secondary user is Sienna, who will author behavior trees, schema definitions, and engine-facing content. The tertiary consumer is the Aspen game engine itself, which ingests compiled storybook data at runtime. + +--- + +## 2. The Authoring Boundary + +The most important design decision is **what Lonni authors vs. what the engine generates at runtime.** The RFC describes rich runtime state — bond strengths evolving through interaction, need levels decaying, schedules regenerating daily. Lonni doesn't author runtime state. 
She authors: + +| Lonni Authors | Engine Generates | +|---|---| +| Character personas (personality, backstory, starting state) | Actual need levels (decaying over time) | +| Relationship templates (what "Spousal" means structurally) | Actual bond strengths (evolving through interaction) | +| Relationship instances (Martha and David are Spousal at sim start) | Life arc state transitions during play | +| Life arc definitions (the state machines themselves) | Schedule instances (generated daily from templates + context) | +| Schedule templates (daily patterns) | Behavior tree evaluation results | +| Behavior tree definitions (decision logic) | Substrate state | +| Institution definitions (governance, resources, roles) | Entity positions, pathfinding | +| Condition definitions (effects, contagion profiles) | Wake queue events | +| Locations, trait axes, capability sets | CRDT sync traffic | +| Prose (backstory, descriptions, flavor text) | Emergent interactions | + +The storybook is a **world definition** — initial conditions plus the rules that govern how things evolve. The engine is the physics that runs those rules. This separation is clean, and the grammar should reinforce it: storybook files describe *what could be* and *what starts as*, never *what is happening right now*. + +--- + +## 3. Language Design + +### 3.1 Design Principles + +**Narrative-first.** The language should read like a description of a world, not like configuration. Lonni should be able to read a `.sb` file aloud and it makes sense. + +**One concept, one construct.** Each simulation concept (character, life arc, schedule, etc.) gets dedicated syntax. No shoehorning behavior trees into the same structure as character bios. + +**References over duplication.** Define once, reference everywhere. Override locally when needed. 
+ +**Prose is a first-class citizen.** Backstory, descriptions, and flavor text aren't comments or afterthoughts — they're part of the data, surfaced in-game, and should be comfortable to write. + +**Errors should be kind.** Lonni is not a developer. Error messages must be precise, plain-language, and suggest fixes. + +### 3.2 Top-Level Constructs + +Every `.sb` file contains one or more top-level blocks. The keyword at the start of each block disambiguates unambiguously (LL(1) at the top level). + +``` +-- File-level imports +use + +-- Definitions (one or more per file) +enum { ... } +traits { ... } +capset { ... } +capability { ... } +need_template { ... } +character { ... } +life_arc { ... } +schedule { ... } +behavior { ... } +subtree { ... } +institution { ... } +condition { ... } +relationship { ... } +location { ... } +species { ... } +``` + +### 3.3 Value Types + +The language has a small, fixed type system. No user-defined types beyond enums. + +| Type | Syntax | Examples | +|---|---|---| +| Integer | bare number | `34`, `500`, `0` | +| Float | number with decimal | `0.85`, `1.0`, `0.0` | +| Boolean | keyword | `true`, `false` | +| String | double-quoted | `"hello"` | +| Identifier | bare word | `martha`, `work`, `romantic` | +| Qualified path | dotted or `::` separated | `institutions::Bakery::owner`, `bond::committed` | +| Time literal | `HH:MM` | `5:00`, `18:30`, `23:59` | +| Duration literal | number + unit suffix | `3d`, `7d`, `2h`, `30m`, `45s` | +| Range | value `..` value | `0.0 .. 1.0`, `3d .. 7d` | +| List | `[` comma-separated `]` | `[english, spanish]`, `[mon, tue, wed]` | +| Prose block | `---tag` ... `---` | See §3.4 | + +**Duration units:** `d` (days), `h` (hours), `m` (minutes), `s` (seconds). These are game-time, not real-time. The engine's GameTime RFC defines the mapping. + +**Ranges** are inclusive on both ends. They're used for trait bounds, duration ranges, severity ranges — anywhere a value varies within limits. 
+ +### 3.4 Prose Blocks + +Prose blocks use triple-dash delimiters with a tag name: + +``` +---backstory +Martha grew up in the valley, apprenticing at the old bakery from +age fourteen. When old Henrik retired, she took over — not because +she had grand ambitions, but because the town needed bread and she +knew how to make it. +--- +``` + +Inside a prose block, everything is raw text. No escaping needed. The only rule is that a line containing exactly `---` (and nothing else) ends the block. This means prose can contain dashes, em-dashes, markdown-like formatting, whatever — as long as no line is *only* three dashes. + +Supported prose tags and their semantics are defined by the construct they appear in. A `character` block recognizes `backstory`, `description`, `notes`. An `institution` recognizes `description`, `history`. The validator checks that prose tags are valid for their context. + +### 3.5 Comments + +Line comments use `--`. There are no block comments. This is intentional — block comments interact poorly with prose blocks and add lexer complexity for minimal benefit. + +``` +-- This is a comment +character Martha { -- inline comment + age: 34 +} +``` + +### 3.6 Enums + +Enums define closed sets of valid values. They're used for bond types, coordination levels, autonomy levels, activity types — anything where the set of valid options is fixed. + +``` +enum BondType { + romantic + familial + friendship + professional + caretaking +} + +enum CoordinationLevel { + none -- strangers, no coordination + ad_hoc -- can propose one-off activities + recurring -- can establish patterns + cohabiting -- share schedule generation context + dependent -- one party has authority over other +} +``` + +Enum variants are ordered — their declaration order defines a total ordering usable in expressions (`coordination >= cohabiting`). This matters for coordination level thresholds and bond strength gates. 
+ +Enums can also carry associated data for documentation purposes: + +``` +enum ActivityType { + work -- sustained productive activity + eat -- consuming food or drink + sleep -- resting, dormant + leisure -- self-directed free time + travel -- moving between locations + social -- interaction-focused activity + worship -- religious or spiritual practice + learn -- education, skill development +} +``` + +### 3.7 The Schema Layer + +The engine ships a set of `.sb` files that define the **core schema** — the enums, trait axes, capability types, and need definitions that the simulation expects. These live in a `schema/` directory and are read-only from Lonni's perspective. + +``` +storybook/ +├── schema/ -- shipped with engine, not edited by Lonni +│ ├── core.sb -- BondType, CoordinationLevel, AutonomyLevel, etc. +│ ├── needs.sb -- NeedType enum, base need_templates +│ ├── capabilities.sb -- CapabilityType enum, base capsets +│ └── activities.sb -- ActivityType enum +├── world/ -- Lonni's content +│ ├── characters/ +│ ├── institutions/ +│ └── ... +└── storybook.toml -- project metadata, schema version +``` + +Lonni's content references schema types but can also define new enums for content-specific categorization. The validator ensures her content is compatible with the engine's expectations. + +The `storybook.toml` is the one non-`.sb` file. It contains project metadata that doesn't belong in the DSL: + +```toml +[storybook] +name = "Aspen Village" +schema_version = "0.1" +engine_compat = "0.1" + +[directories] +schema = "schema" +world = "world" +``` + +TOML is fine here because it's a single flat config file Lonni rarely touches. + +--- + +## 4. Cross-Referencing System + +### 4.1 Namespacing + +Every top-level definition has a **qualified name** derived from its file path and block name. 
The directory structure under `world/` defines the namespace: + +``` +world/characters/martha.sb → defines character Martha + namespace: characters::Martha + +world/institutions/bakery.sb → defines institution Bakery + namespace: institutions::Bakery + +world/shared/traits.sb → defines traits PersonalityTraits + namespace: shared::PersonalityTraits +``` + +The namespace separator is `::`. Files can define multiple blocks, all sharing the directory prefix. + +### 4.2 `use` Statements + +`use` statements bring names into scope at file level. They go at the top of the file, before any definitions. + +``` +-- Bring a single name into scope +use shared::PersonalityTraits + +-- Bring multiple names from the same directory +use conditions::{Flu, Cold, Heartbreak} + +-- Wildcard: bring everything from a directory +use shared::* + +-- Schema references (implicit prefix) +use schema::core::BondType +``` + +**Within the same directory, bare names resolve without `use`.** If `martha.sb` and `david.sb` are both in `characters/`, Martha can reference David by bare name. Cross-directory references require `use` or a fully qualified path. + +**Schema types are always in scope** without explicit `use`. `BondType`, `CoordinationLevel`, `ActivityType`, etc. are globally visible. This keeps Lonni from needing boilerplate imports for core vocabulary. + +### 4.3 Qualified Paths + +Anywhere a name is expected, a qualified path works: + +``` +roles: [institutions::Bakery::owner, households::Miller::adult] +arc: archetypes::PersonArc at Adult::Partnered +susceptible: [conditions::Flu, conditions::Cold] +``` + +The `::` separator drills into definitions. `institutions::Bakery::owner` means "the `owner` role defined inside the `Bakery` institution in the `institutions/` directory." The resolver validates that each segment of the path exists and is the right kind of thing.
+ +### 4.4 `link` — Relationship Instantiation + +Relationships are the trickiest cross-reference because they're **bidirectional**, **involve two files**, and **feelings aren't always mutual**. + +#### Basic Syntax + +``` +-- In characters/martha.sb +character Martha { + link David via Spousal { + coordination: cohabiting + started: year 3 + bond: 0.82 -- symmetric: same value for both parties + } +} +``` + +This declares that Martha and David have a Spousal relationship. The resolver handles bidirectionality: + +1. When processing Martha's file, it registers `Martha <-> David via Spousal` +2. When processing David's file, it checks whether David also declares a link to Martha +3. **If David doesn't declare it:** the relationship is automatically bidirectional. David inherits it. +4. **If David does declare it:** the resolver checks compatibility. Shared fields must agree; per-direction fields are merged (see below). + +The recommendation is: **declare each relationship once, in whichever file feels natural.** For Martha and David, put it in Martha's file (or David's — just pick one). The resolver makes it visible from both sides. + +#### Asymmetric Values + +Feelings aren't always mutual. Martha might love David differently than David loves Martha. The `self`/`other` blocks let each party have distinct values: + +``` +character Martha { + link David via Spousal { + -- Shared (inherently bilateral, same for both parties) + coordination: cohabiting + started: year 3 + + -- Per-direction (Martha's feelings toward David) + self { + bond: 0.82 + warmth: 0.9 + tension: 0.1 + } + + -- Per-direction (David's feelings toward Martha) + other { + bond: 0.65 + warmth: 0.7 + tension: 0.25 + } + } +} +``` + +`self` always means "the character this link block appears inside" — Martha in this case. `other` is the other party — David. This is unambiguous regardless of which file the link lives in. + +**Mixing symmetric and asymmetric:** Fields at the top level are shared. 
The moment a field appears inside `self` or `other`, it becomes per-direction. You can mix freely — `coordination` and `started` are shared (they're either cohabiting or they're not), while `bond`, `warmth`, and `tension` are per-direction because feelings are asymmetric. + +**Shorthand:** If you don't use `self`/`other`, all values are symmetric — the same for both parties. This is the common case for simple relationships: + +``` +link Elena via Friendship { + bond: 0.5 + coordination: recurring +} +``` + +#### Relationship Templates and Asymmetry + +The relationship template declares which fields *support* asymmetry: + +``` +relationship Spousal { + bond_type: romantic + min_coordination: cohabiting + + -- Fields that can differ per-party + asymmetric: [bond, warmth, tension] + + -- Fields that are always shared (coordination, commitments, + -- started are inherently bilateral and cannot be asymmetric) + + commitments { + dinner_together { daily at 18:00, location: home, activity: eat } + sleep_together { daily at bedtime, location: home, activity: sleep } + } + + levels { + acquaintance (0.0 .. 0.2) { enables: [greet] } + friendly (0.2 .. 0.5) { enables: [greet, chat, share_meal] } + close (0.5 .. 0.7) { enables: [greet, chat, share_meal, visit, confide] } + intimate (0.7 .. 1.0) { enables: [all, shared_schedule, cohabitation] } + } +} +``` + +The validator uses the `asymmetric` declaration to catch mistakes — if Lonni puts `coordination` inside a `self` block, she gets a clear error: *"coordination is a shared field in Spousal and cannot differ between parties."* If she puts `bond` at the top level, that's fine — it just means both parties share the same bond strength. + +#### Asymmetric Role Relationships + +Some relationships are asymmetric by *structure*, not just by values — parent/child, employer/employee, mentor/apprentice. 
These use the `as` clause to assign roles: + +``` +character Martha { + link Tommy via Parental as parent { + coordination: dependent + started: year 0 + + self { -- Martha (parent) toward Tommy + bond: 0.95 + warmth: 0.95 + } + other { -- Tommy (child) toward Martha + bond: 0.9 + warmth: 0.85 + } + } +} +``` + +The `as parent` clause assigns Martha's role. Tommy gets the complementary role (`child`, defined in the `Parental` relationship template). The template defines which roles exist and what each role enables: + +``` +relationship Parental { + bond_type: familial + roles: [parent, child] + asymmetric: [bond, warmth, tension] + + role parent { + authority_over: child + capabilities: [set_schedule, set_boundaries, provide_care] + } + + role child { + autonomy: external -- schedule set by parent + capabilities: [receive_care, seek_comfort] + } +} +``` + +This handles parent-child, employer-employee, caretaker-dependent, mentor-apprentice — any relationship where the two parties have fundamentally different roles and affordances. + +### 4.5 `override` — Template Customization + +When referencing a template (schedule, needs, relationship), the `override` keyword selectively patches fields: + +``` +character Martha { + needs: HumanNeeds { + override sleep { decay: 0.9 } -- only changes decay rate + override social { urgent: 0.6 } -- Martha's more socially needy + } + + schedule: BakerSchedule { + override block lunch { time: 12:30 - 13:00 } + remove block evening_walk + append block meditation { 5:00 - 5:15 activity: meditate at: home } + } +} +``` + +**Merge semantics:** + +| Operation | Meaning | Syntax | +|---|---|---| +| `override { fields }` | Recursive structural merge. Only specified fields are replaced; everything else inherits from the template. | `override sleep { decay: 0.9 }` | +| `remove ` | Delete an inherited item entirely. | `remove block evening_walk` | +| `append { fields }` | Add a new item not present in the template. 
| `append block meditation { ... }` | + +For nested structures, override is recursive. `override block lunch { time: 12:30 - 13:00 }` patches only the `time` field of the `lunch` block — `activity`, `at`, `priority` all keep their template values. + +**Lists** are replaced wholesale by default. If a template defines `capabilities: [work, trade, parent]` and an override says `capabilities: [work, trade]`, the result is `[work, trade]`. For additive modification, use explicit operations: + +``` +override capabilities { + add mentor + remove parent +} +``` + +### 4.6 `include` — Composition + +`include` is for composing capability sets, need templates, and other aggregates: + +``` +capset FullAdult { + include BasicCapabilities -- pull in everything from BasicCapabilities + work, trade, parent, travel -- add more +} +``` + +This is purely additive — include pulls in all items from the referenced set. It's distinct from `use` (which affects name visibility) and `override` (which patches inherited values). `include` is "copy these items into this definition." + +### 4.7 Resolution Pipeline + +Cross-referencing happens in a multi-pass pipeline after parsing: + +``` +┌─────────────┐ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ +│ 1. Parse │────▶│ 2. Register │────▶│ 3. Resolve │────▶│ 4. Merge │────▶│ 5. Validate │ +│ all .sb │ │ names │ │ references │ │ overrides │ │ semantics │ +│ files │ │ │ │ │ │ │ │ │ +└─────────────┘ └─────────────┘ └─────────────┘ └─────────────┘ └─────────────┘ + │ │ │ │ │ + LALRPOP + Walk all Walk all Apply override Type-check, + logos produce top-level use stmts, /remove/append range-check, + per-file ASTs defs, build qualified to produce constraint + name table paths, link fully-resolved verification, + with kinds stmts. Check entities cross-entity + existence + consistency + kind matching. + Detect cycles. +``` + +**Pass 1 — Parse.** Each `.sb` file is independently parsed by LALRPOP + logos into an AST. 
Parse errors are reported with file, line, column. No cross-file awareness needed. + +**Pass 2 — Register.** Walk all ASTs, collect every top-level definition into a name table: `{ qualified_path → (kind, file, AST node) }`. Detect duplicate definitions (same name in same namespace). This pass is $O(n)$ in total definitions. + +**Pass 3 — Resolve.** Walk every reference (use statements, qualified paths, link targets, template references) and resolve them against the name table. Check that the referenced thing exists and is the right kind (you can't use a `condition` where a `schedule` is expected). Detect unresolved references with fuzzy-match suggestions: *"Martha references `conditions::Flue` but no condition named `Flue` exists. Did you mean `conditions::Flu`?"* + +Also in this pass: detect circular includes (capset A includes capset B includes capset A). Circular links are fine (Martha links to David, David links to Martha) — they're just bidirectional relationships. + +**Pass 4 — Merge.** For every entity with template references + overrides, produce a fully resolved version. Martha's `needs: HumanNeeds { override sleep { decay: 0.9 } }` becomes a concrete needs block with all values filled in. This pass produces the "flat" representation that the engine can consume directly. + +**Pass 5 — Validate.** Semantic checks: +- Trait values are within declared ranges +- Schedule times don't have impossible overlaps (two mandatory blocks at the same time) +- Life arc transitions reference valid states +- Behavior tree nodes reference valid actions/conditions known to the engine +- Relationship bond values are in `0.0 .. 
1.0` +- All roles referenced by characters exist in their institutions +- Shared commitments reference entities that exist +- Condition severity ranges are valid +- Required fields are present (every character needs at minimum an `age` and `species`) + +Validation errors are reported with full context: which file, which block, which field, what's wrong, and what would fix it. + +--- + +## 5. The Expression Language + +Life arc transitions, behavior tree conditions, and constraint rules need a small expression language. + +### 5.1 Scope + +This is deliberately minimal. It's not a programming language. It handles: + +- **Comparisons:** `age >= 13`, `bond >= 0.7`, `coordination >= cohabiting` +- **Logical combinators:** `and`, `or`, `not` +- **Field access:** `entity.age`, `bakery.stock`, `batch.phase` +- **Quantifiers:** `relationship.any(bond >= romantic_interest)`, `children.all(age >= 18)` +- **Event references:** `event::child_born`, `event::death`, `event::separation` +- **State checks:** `is` keyword for enum comparison (`batch.phase is rising`, `autonomy is full`) + +### 5.2 Grammar + +``` +Expr = OrExpr +OrExpr = AndExpr ("or" AndExpr)* +AndExpr = NotExpr ("and" NotExpr)* +NotExpr = "not" Atom | Atom +Atom = Comparison | EventRef | Quantifier | "(" Expr ")" +Comparison = FieldAccess CompOp Value +CompOp = ">=" | "<=" | ">" | "<" | "==" | "!=" | "is" +FieldAccess = Ident ("." Ident)* +EventRef = "event" "::" Ident +Quantifier = FieldAccess "." ("any" | "all" | "none") "(" Expr ")" +Value = Number | Float | Ident | QualifiedPath | Duration | Time +``` + +### 5.3 Evaluation + +Expressions are not evaluated at parse time. They're stored in the AST as expression trees and compiled into Rust predicates during engine loading. The validator checks that field paths reference real fields and that types are compatible (you can't compare an integer to an enum without `is`). + +For behavior trees, the `!` condition nodes contain expressions: + +``` +! 
bakery.customer_queue > 0 +! batch.phase is mixing +! need.any is urgent +``` + +For life arc transitions: + +``` +Child -> Adolescent when age >= 13 +Partnered -> Parent when event::child_born +Parent -> Partnered when children.all(age >= 18) +``` + +The `when` clause is an expression. Compound conditions use `and`/`or`: + +``` +Single -> Courting when relationship.any(type is romantic and bond >= 0.3) +``` + +### 5.4 What the Expression Language Is NOT + +It cannot: +- Assign values +- Call functions +- Loop or iterate +- Define variables +- Do arithmetic (no `a + b`, no `stock * 0.5`) + +If something requires computation, it belongs in the engine, not in content. The expression language is purely for **predicates** — boolean questions about world state. + +One exception worth considering: **simple arithmetic in need modifiers and capability modifiers**, like `rate: *1.5` or `threshold: -0.2`. These aren't general expressions — they're specific modifier syntax within condition effect blocks. They use prefix operators (`*` for multiply, `+`/`-` for add/subtract) applied to a single value. This is limited enough to stay in the grammar without opening the door to general computation. + +--- + +## 6. Behavior Tree Syntax + +Behavior trees deserve special attention because they're the most structurally complex construct and they bridge content authoring with engine code. + +### 6.1 Node Types + +| Sigil | Name | Semantics | +|---|---|---| +| `?` | Selector | Try children in order; succeed on first success | +| `>` | Sequence | Run children in order; fail on first failure | +| `!` | Condition | Evaluate an expression; succeed if true | +| `@` | Action | Execute a named engine action | +| `~` | Decorator | Modify child behavior (invert, repeat, cooldown) | + +### 6.2 Syntax + +``` +behavior WorkAtBakery { + applies_to: activity(work) at institution(bakery) + + tree { + ? root { + > handle_urgent { + ! 
need.any is urgent + @ handle_urgent_need + } + > serve_customers { + ! bakery.customer_queue > 0 + @ move_to(counter) + @ serve_next_customer + } + > continue_batch { + ! bakery.active_batches > 0 + ? batch_phase { + > mixing { ! batch.phase is mixing; @ mix_dough } + > rising { ! batch.phase is rising; @ wait_for_timer } + > baking { ! batch.phase is baking; @ tend_oven } + > done { ! batch.phase is done; @ unload_and_stock } + } + } + ? idle { + @ clean + @ organize + @ idle_wait + } + } + } +} +``` + +Semicolons separate children on a single line. Newlines also separate children. This gives flexibility — simple sequences can be one-liners, complex trees can be multi-line. + +### 6.3 Actions and the Engine Interface + +Action names (`handle_urgent_need`, `mix_dough`, `serve_next_customer`) reference engine-defined behaviors. The engine publishes an **action registry** (another `.sb` file in `schema/`) that lists valid action names with their parameters: + +``` +-- schema/actions.sb +action move_to { + param target: Location + ---description + Navigate the entity to the specified location using pathfinding. + --- +} + +action serve_next_customer { + requires: capability(work) at institution with customer_queue + ---description + Dequeue the next customer and perform a service interaction. + --- +} + +action wait_for_timer { + ---description + Do nothing until the current timed process completes. + --- +} +``` + +The validator checks that every `@` action in a behavior tree matches a registered action. Unknown actions are errors with suggestions. + +### 6.4 Who Authors Behavior Trees + +Realistically, behavior trees are the most engine-facing construct. Sienna will author most of them. But the grammar makes them accessible enough that Lonni could modify existing trees — tweaking priorities, adding idle behaviors, adjusting conditions. 
The egui tool will represent BTs as a visual node graph with drag-and-drop editing, so she never needs to touch the text syntax directly unless she wants to. + +### 6.5 Subtrees + +Common behavior patterns appear across many trees — "handle urgent need" is in virtually every work behavior, "flee threat" is in every wildlife mode. Rather than duplicating these, the `subtree` construct defines a reusable fragment: + +``` +subtree HandleUrgentNeed { + > handle_urgent { + ! need.any is urgent + @ handle_urgent_need + } +} + +subtree FleeThreat { + > flee { + ! threat.detected + @ flee_from(threat.source) + ! distance_to(threat.source) >= safety_range + } +} +``` + +Subtrees are referenced inside behavior trees with `use_subtree`: + +``` +behavior WorkAtBakery { + applies_to: activity(work) at institution(bakery) + + tree { + ? root { + use_subtree HandleUrgentNeed + > serve_customers { + ! bakery.customer_queue > 0 + @ move_to(counter) + @ serve_next_customer + } + > continue_batch { ... } + ? idle { ... } + } + } +} + +behavior WorkAtClinic { + applies_to: activity(work) at institution(clinic) + + tree { + ? root { + use_subtree HandleUrgentNeed -- same subtree, different context + > treat_patients { ... } + ? idle { ... } + } + } +} +``` + +`use_subtree` inlines the referenced subtree's children at that position in the tree. It's resolved during the merge pass (pass 4), not at parse time — the parser just records it as a reference. The validator checks that the referenced subtree exists and that inlining it doesn't create cycles. + +Subtrees follow the same cross-referencing rules as everything else: they have qualified names, can be imported with `use`, and can live in any `.sb` file. A natural home is alongside the behavior trees that use them: + +``` +storybook/ +└── world/ + └── behaviors/ + ├── common.sb -- subtree HandleUrgentNeed, subtree FleeThreat, etc. 
+ ├── work.sb -- behavior WorkAtBakery, behavior WorkAtClinic + └── wildlife.sb -- behavior DeerBehavior, behavior BirdBehavior +``` + +Subtrees can reference other subtrees (`use_subtree` inside a `subtree`), enabling composition. The resolver detects circular references. + +--- + +## 7. Species and Entity Archetypes + +The RFC describes very different entity types — people, cats, deer, cars, buildings. Rather than having the grammar handle each specially, we use a **species** construct that defines what components an entity type has: + +``` +species Human { + components: [lifecycle, physical_presence, needs, instincts, + roles, autonomy, relationships, membership] + life_arc: PersonArc + needs: HumanNeeds + + ---description + Sapient beings with full agency, social bonds, and institutional + participation. The primary inhabitants of the village. + --- +} + +species Deer { + components: [lifecycle, physical_presence, needs, instincts] + behavior_mode: WildlifeModes -- simplified BT, not full behavior trees + herd: true + + ---description + Wild ungulates. No social structure beyond herding. Flight-driven + threat response. Grazes on foliage, interacts with substrate. + --- +} + +species Building { + components: [lifecycle, physical_presence, operable, stateful] + -- No agency, no needs, no behavior trees + + ---description + Static structures that provide shelter, services, and institutional + anchoring. Age and decay over time via lifecycle. + --- +} +``` + +Then characters reference their species: + +``` +character Martha { + species: Human + age: 34 + ... +} +``` + +The validator uses the species definition to check that the character only includes components appropriate for their species. A Deer character can't have `roles` or `relationships`. A Building can't have `personality`. + +--- + +## 8. Locations + +Locations are referenced everywhere — schedules, behavior trees, relationship commitments. 
They need their own construct: + +``` +location TownSquare { + position: [50, 32] -- tile coordinates + size: [3, 3] -- in 10m tiles + tags: [public, outdoor, gathering] + + activities: [social, eat, leisure, worship] + capacity: 50 + + ---description + The heart of the village. A cobblestone square with a central fountain, + benches under old oaks, and a small stage for festivals. Market stalls + line the eastern edge on Saturdays. + --- +} + +location Bakery { + position: [48, 30] + size: [2, 1] + tags: [indoor, workplace, commercial] + institution: institutions::Bakery -- links to the institution definition + + activities: [work, eat] + capacity: 8 + + substations { + counter { position: [0, 0], activities: [work] } + oven { position: [1, 0], activities: [work], capabilities: [heat] } + seating { position: [0, 1], activities: [eat, social], capacity: 4 } + } +} +``` + +Locations bridge the spatial world (tile positions, sizes) with the simulation world (what activities happen here, who works here, what institution owns this space). The `substations` block defines finer-grained positions within a location, referenced by behavior trees (`@ move_to(counter)`). + +--- + +## 9. Implementation Architecture + +### 9.1 Crate Structure + +``` +storybook/ -- separate repo (e.g. 
github.com/aspen-game/storybook) +├── Cargo.toml -- workspace root +├── storybook/ -- core crate: parser, resolver, API, CLI +│ ├── src/ +│ │ ├── lib.rs +│ │ ├── syntax/ +│ │ │ ├── mod.rs +│ │ │ ├── lexer.rs -- logos lexer with prose mode +│ │ │ ├── ast.rs -- AST node types +│ │ │ └── parser.lalrpop -- LALRPOP grammar +│ │ ├── resolve/ +│ │ │ ├── mod.rs +│ │ │ ├── names.rs -- name table, qualified path resolution +│ │ │ ├── merge.rs -- override/remove/append merging +│ │ │ └── validate.rs -- semantic validation +│ │ ├── diagnostics.rs -- error formatting with miette/ariadne +│ │ ├── project.rs -- Project::load(path), filesystem handling +│ │ ├── query.rs -- query API (find characters, list relationships, etc.) +│ │ └── types.rs -- resolved types (what consumers see) +│ ├── src/bin/ +│ │ └── sb.rs -- CLI binary: sb validate, sb query, sb watch +│ └── Cargo.toml +│ +├── storybook-editor/ -- editor crate: egui-based visual editor +│ └── ... -- depends on storybook + eframe, egui, octocrab, git2 +│ +└── schema/ -- default schema .sb files shipped with the crate + ├── core.sb + ├── needs.sb + ├── capabilities.sb + └── activities.sb +``` + +This is its own repository, separate from the Aspen game engine. The workspace contains two crates: the core `storybook` library+CLI and the `storybook-editor` GUI application. + +The `syntax` and `resolve` modules are `pub` for internal organization but the public API surface is `Project::load()`, the query interface, and the resolved types. The CLI is a binary target within the core crate. + +The editor stays in a separate workspace crate because it pulls in heavy GUI dependencies (`eframe`, `egui_node_graph`, `octocrab`, `git2`) that don't belong in the engine's dependency tree. 
+ +The Aspen engine depends on the `storybook` crate via a git dependency or path dependency (during development): + +```toml +# In aspen's Cargo.toml +[dependencies] +storybook = { git = "https://github.com/aspen-game/storybook", branch = "main" } +``` + +If compile times become a problem later, the module boundaries make splitting the core crate into sub-crates a mechanical refactor — but at this scale, one library crate is simpler to maintain and faster to iterate on. + +### 9.2 Lexer Design (logos) + +The lexer needs two modes because of prose blocks: + +**Normal mode:** Standard tokenization — keywords, identifiers, numbers, operators, punctuation. + +**Prose mode:** Activated when the lexer encounters `---` followed by an identifier. Captures everything as raw text until a line containing only `---`. Emits a single `ProseBlock(tag, content)` token. + +```rust +#[derive(Logos, Debug, PartialEq)] +#[logos(skip r"[ \t]+")] // skip whitespace (not newlines) +#[logos(skip r"--[^\n]*")] // skip line comments +enum Token { + // Keywords + #[token("use")] Use, + #[token("character")] Character, + #[token("life_arc")] LifeArc, + #[token("schedule")] Schedule, + #[token("behavior")] Behavior, + #[token("subtree")] Subtree, + #[token("use_subtree")] UseSubtree, + #[token("institution")] Institution, + #[token("condition")] Condition, + #[token("relationship")] Relationship, + #[token("location")] Location, + #[token("species")] Species, + #[token("enum")] Enum, + #[token("traits")] Traits, + #[token("capset")] Capset, + #[token("capability")] Capability, + #[token("need_template")] NeedTemplate, + #[token("link")] Link, + #[token("via")] Via, + #[token("as")] As, + #[token("self")] Self_, + #[token("other")] Other, + #[token("asymmetric")] Asymmetric, + #[token("override")] Override, + #[token("remove")] Remove, + #[token("append")] Append, + #[token("include")] Include, + #[token("when")] When, + #[token("at")] At, + #[token("on")] On, + #[token("to")] To, + 
#[token("from")] From, + #[token("and")] And, + #[token("or")] Or, + #[token("not")] Not, + #[token("is")] Is, + #[token("true")] True, + #[token("false")] False, + #[token("event")] Event, + #[token("tree")] Tree, + #[token("block")] Block, + #[token("state")] State, + #[token("substates")] Substates, + #[token("transitions")] Transitions, + + // Sigils (behavior tree) + #[token("?")] Selector, + #[token("!")] Condition, + #[token("@")] Action, + #[token(">")] Sequence, + #[token("~")] Decorator, + + // Punctuation + #[token("{")] LBrace, + #[token("}")] RBrace, + #[token("[")] LBracket, + #[token("]")] RBracket, + #[token("(")] LParen, + #[token(")")] RParen, + #[token(":")] Colon, + #[token("::")] PathSep, + #[token(",")] Comma, + #[token(";")] Semi, + #[token("..")] DotDot, + #[token(".")] Dot, + #[token("->")] Arrow, + #[token("<->")] BiArrow, + #[token("*")] Star, + #[token(">=")] Gte, + #[token("<=")] Lte, + #[token("==")] Eq, + #[token("!=")] Neq, + // Note: > and < overlap with Sequence sigil and other uses. + // The parser disambiguates by context (> at start of line in a tree + // block = sequence; > between expressions = comparison). + + #[token("\n")] Newline, + + // Literals + #[regex(r"[0-9]+:[0-9]{2}", |lex| lex.slice().to_string())] + TimeLiteral(String), + + #[regex(r"[0-9]+[dhms]", |lex| lex.slice().to_string())] + DurationLiteral(String), + + #[regex(r"[0-9]+\.[0-9]+", |lex| lex.slice().parse::().unwrap())] + Float(f64), + + #[regex(r"[0-9]+", |lex| lex.slice().parse::().unwrap())] + Integer(i64), + + #[regex(r#""[^"]*""#, |lex| lex.slice()[1..lex.slice().len()-1].to_string())] + StringLiteral(String), + + #[regex(r"[a-zA-Z_][a-zA-Z0-9_]*", |lex| lex.slice().to_string())] + Ident(String), + + // Prose blocks — handled by a wrapper around the logos lexer. + // When the wrapper sees `---` followed by an Ident on the same logical + // line, it switches to raw capture mode. 
+ ProseBlock { tag: String, content: String }, +} +``` + +The prose block handling can't be done purely in logos regex — it needs stateful scanning. The approach: write a thin wrapper `StorybookLexer` that drives `logos` for normal tokens but intercepts `---` sequences and manually scans ahead for prose blocks. LALRPOP's external lexer interface supports this cleanly. + +### 9.3 LALRPOP Considerations + +LALRPOP generates an LALR(1) parser. A few areas need care: + +**The `>` ambiguity.** In behavior tree blocks, `>` is the sequence sigil. In expressions, `>` is a comparison operator. These contexts are syntactically separated — `>` as a sigil only appears at the start of a behavior tree child node, which is inside a `tree { ... }` block. The grammar can handle this by having separate productions for tree nodes vs. expressions. + +**Newlines as separators.** The grammar uses both newlines and semicolons as statement separators within blocks. LALRPOP handles this if newlines are explicit tokens (not skipped). The grammar needs `Sep` productions that accept either. + +**Error recovery.** LALRPOP supports error recovery via `!` in grammar rules, allowing the parser to skip to the next statement on error and continue parsing. This is important for the editor — Lonni wants to see all errors at once, not just the first one. + +### 9.4 Diagnostics + +Error reporting uses the `ariadne` or `miette` crate for rich, colorful error messages: + +``` +error[E0301]: unresolved reference + ┌─ world/characters/martha.sb:14:17 + │ +14 │ susceptible: [Flue, Cold] + │ ^^^^ no condition named `Flue` exists + │ + = help: did you mean `Flu`? 
(defined in world/conditions/illness.sb) +``` + +``` +error[E0502]: type mismatch in override + ┌─ world/characters/martha.sb:22:27 + │ +22 │ override sleep { decay: "fast" } + │ ^^^^^^ expected float, found string + │ + = note: `decay` is defined as a float in `HumanNeeds` + (world/shared/needs.sb:4) +``` + +``` +warning[W0101]: relationship declared in both files + ┌─ world/characters/david.sb:8:3 + │ + 8 │ link Martha via Spousal { bond: 0.82 } + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + │ + = note: this relationship is already declared in + world/characters/martha.sb:15 + = help: declare a relationship in one file only; it's automatically + visible from both sides. use self/other blocks for + asymmetric values. +``` + +The diagnostic system is a module within the `storybook` crate so it can be reused by both the CLI and the editor. + +--- + +## 10. The Editor (storybook-editor) + +### 10.1 Architecture + +A standalone `eframe` application (not inside Bevy). Depends on the `storybook` crate for parsing, resolution, and validation. Uses the filesystem as the source of truth — every save writes `.sb` files and triggers a re-resolve. 
+ +``` +┌────────────────────────────────────────────────┐ +│ storybook-editor │ +│ │ +│ ┌──────────┐ ┌──────────┐ ┌──────────────┐ │ +│ │ Character│ │ Life Arc│ │ Relationship │ │ +│ │ Editor │ │ Editor │ │ Graph │ │ +│ │ (forms) │ │ (nodes) │ │ (nodes) │ │ +│ └────┬─────┘ └────┬─────┘ └──────┬───────┘ │ +│ │ │ │ │ +│ ┌────┴──────────────┴───────────────┴───────┐ │ +│ │ storybook crate │ │ +│ │ parse ──▶ resolve ──▶ validate ──▶ query │ │ +│ └────────────────────┬──────────────────────┘ │ +│ │ │ +│ ┌────────────────────┴──────────────────────┐ │ +│ │ filesystem (.sb files) │ │ +│ │ + transparent git │ │ +│ └───────────────────────────────────────────┘ │ +└────────────────────────────────────────────────┘ +``` + +### 10.2 Panels + +| Panel | What It Shows | Primary Interaction | +|---|---|---| +| **Character Editor** | Form fields for structured data, embedded text area for prose, portrait slot | Fill in fields, write backstory, link relationships | +| **Life Arc Editor** | Node graph — states as nodes, transitions as edges with `when` conditions | Drag states, draw transition arrows, edit conditions | +| **Schedule Builder** | Timeline/calendar view with draggable time blocks | Drag blocks to resize/reposition, color by activity type | +| **Behavior Tree Editor** | Node graph — tree hierarchy with condition/action nodes | Drag to rearrange, click to edit conditions/actions | +| **Relationship Map** | Force-directed graph — characters as nodes, relationships as edges | Click edges to see/edit details, drag nodes to arrange | +| **Location Browser** | Grid map showing tile positions + detail panel | Click locations, edit properties, link to institutions | +| **Issues & PRs** | GitHub issues and pull requests, linked to storybook entities | Create, comment, close issues; review PRs; cross-link to characters/systems | +| **Diagnostics Panel** | Live error/warning list with click-to-navigate | Click an error to jump to the relevant panel + field | + +### 10.3 Git 
Integration + +The editor manages git transparently: + +- **Every save** creates a commit with an auto-generated message: `"Update Martha: modified backstory and personality traits"` +- **Branch switching** is exposed as "versions" in the UI — Lonni can create a "what if" branch, make changes, and merge back +- **Undo** is just `git revert` under the hood +- **History** shows a timeline of changes with diffs rendered as "what changed" in plain language, not code diffs + +The git operations use `git2` (libgit2 bindings for Rust). No shell-outs, no git CLI dependency. + +### 10.4 GitHub Integration + +The editor integrates with GitHub so Lonni never has to leave the app for project management: + +| Feature | What It Does | +|---|---| +| **Issues** | Browse, create, edit, close issues. Tag issues to specific characters, locations, or systems. Filter by label, assignee, milestone. | +| **Pull Requests** | View open PRs, see diff summaries in plain language ("Martha's personality changed, new location added"), approve/comment. | +| **Comments** | Comment on issues and PRs inline. Mention characters or systems with `@`-style autocomplete linked to storybook entities. | +| **Labels & Milestones** | Manage labels (e.g., `character`, `institution`, `bug`, `story-beat`) and milestones from within the editor. | + +The GitHub API integration uses `octocrab` (async GitHub client for Rust). Authentication is via a personal access token stored in the system keychain — configured once, never asked again. + +The UI surfaces this as an **Issues** panel alongside the content panels. Issues can be cross-linked to storybook entities: clicking "Related: Martha" in an issue jumps to the character editor. Creating a character or relationship can auto-suggest creating a tracking issue. + +Push/pull operations are exposed as "Sync" — a single button that pushes local commits and pulls remote changes. 
Merge conflicts (rare, since Lonni is typically the sole content author) are surfaced with plain-language descriptions and resolved in the editor. + +### 10.5 Self-Updating + +The editor binary is distributed as a single executable. Self-update checks a release endpoint on startup (configurable, could be a GitHub release or your own server) and downloads new versions in the background. The update applies on next launch. + +--- + +## 11. Engine Integration + +### 11.1 Loading + +The game engine depends on the `storybook` crate. At startup (or when entering a save), it: + +1. Loads the storybook directory +2. Parses, resolves, validates +3. Converts resolved types into Bevy ECS components and resources +4. Spawns entities with their initial state + +The conversion from `storybook::types::Character` to Bevy components is a straightforward mapping — personality traits become a `Personality` component, needs become a `Needs` component, etc. Life arc definitions become state machine resources. Behavior tree definitions become `BehaviorTree` components. Schedules become `ScheduleTemplate` resources. + +### 11.2 Hot Reloading (Development) + +During development, the engine watches the storybook directory for changes (via `notify` crate). On change: + +1. Re-parse only the changed files +2. Re-run resolution (full, since cross-references may have changed — this should be fast enough for hundreds of files) +3. Diff the resolved state against the current ECS state +4. Apply delta updates to live entities + +This lets Sienna edit a character's personality in a text editor and see the behavior change in real-time without restarting the game. + +### 11.3 Compiled Format + +For release builds, the storybook is compiled into a binary format (MessagePack, bincode, or similar) that skips parsing and resolution entirely. The build pipeline is: `.sb` files → `storybook` crate → resolve → serialize → `.aspen` binary. The game ships the `.aspen` file, not the raw `.sb` files. 
+ +--- + +## 12. Open Design Questions + +### 12.1 Versioning and Migration + +The schema uses **major.minor versioning** with an **additive-only evolution policy**: + +- **Minor versions** (0.1 → 0.2) add new constructs, fields, enum variants, or optional features. Old files remain valid — new fields have defaults, new constructs are simply unused. +- **Major versions** (0.x → 1.0) are reserved for if a breaking change ever becomes unavoidable. We avoid this as long as possible. + +The schema version is tracked in `storybook.toml`: + +```toml +[storybook] +schema_version = "0.1" +``` + +The validator checks compatibility: a storybook authored against schema 0.1 is valid against 0.2 (new features ignored). A storybook authored against 0.2 may use features not present in 0.1 (validator warns if downgrade is attempted). + +If a breaking change becomes necessary in the future, it will ship with a migration guide and a `sb migrate` CLI command. But this is deferred — the schema is small enough and new enough that additive evolution should cover the foreseeable horizon. + +### 12.2 Localization of Prose + +Character backstories, descriptions, and institution flavor text — should they be localizable? If Aspen ships in multiple languages, the prose blocks need a localization story. + +**Option:** Prose blocks become keys into a localization table. The `.sb` file contains the default language; translations live in companion files (`martha.sb.pt`, `martha.sb.es`). The editor shows the default language and flags untranslated content. + +**Recommendation:** Defer. Build the grammar without localization support. Add it as a file-level concern later (companion files, not grammar changes). + +### 12.3 Procedural Generation Integration + +The RFC envisions up to 500 entities. Lonni isn't going to hand-author 500 characters. Some will be procedurally generated from templates. 
The grammar should support: + +``` +template VillagerTemplate { + species: Human + + personality { + openness: 0.2 .. 0.8 -- random within range + conscientiousness: 0.3 .. 0.9 + warmth: 0.4 .. 0.8 + } + + age: 18 .. 65 + + -- Engine generates a name, backstory, etc. from this template + -- Lonni defines the distributions; the engine instantiates +} +``` + +This is a natural extension of the range syntax. Templates define distributions; the engine samples from them. Hand-authored characters override specific values; procedural characters fill in the gaps. + +**Recommendation:** Include `template` as a construct from the start. It uses the same syntax as `character` but allows ranges where `character` requires concrete values. + +--- + +## 13. Build Plan + +### Phase 1: Grammar + Parser (2 weeks) + +- [ ] Define AST types in `syntax/ast.rs` +- [ ] Implement logos lexer with prose block handling +- [ ] Write LALRPOP grammar for all top-level constructs +- [ ] Write LALRPOP grammar for expression language +- [ ] Write LALRPOP grammar for behavior tree syntax +- [ ] Parse test suite: one `.sb` file per construct, edge cases for expressions and prose blocks +- [ ] Error recovery in parser for multi-error reporting + +**Deliverable:** `syntax` module parses any `.sb` file into an AST. No resolution, no validation. 
+ +### Phase 2: Resolution + Validation (2 weeks) + +- [ ] Name table construction from parsed ASTs +- [ ] `use` statement resolution with wildcard and group support +- [ ] Qualified path resolution with kind checking +- [ ] `link` resolution with bidirectional handling and asymmetric value merging +- [ ] Override merge engine (recursive structural merge, remove, append) +- [ ] `include` expansion for capsets and need templates +- [ ] `use_subtree` inlining with cycle detection +- [ ] Semantic validation (ranges, types, required fields, cross-entity consistency) +- [ ] Diagnostic formatting with `miette` or `ariadne` +- [ ] Fuzzy matching for unresolved name suggestions + +**Deliverable:** `resolve` module. Feed it ASTs, get resolved types or rich error messages. + +### Phase 3: Public API + CLI (1 week) + +- [ ] `Project::load(path)` API +- [ ] Query interface: find characters, list relationships, filter by trait +- [ ] `sb` CLI binary: `sb validate`, `sb query`, `sb inspect` +- [ ] Watch mode for continuous validation (`sb watch`) + +**Deliverable:** Working toolchain. Lonni can start writing `.sb` files and validate them from terminal (with Sienna's help). + +### Phase 4: Schema + Seed Content (1 week) + +- [ ] Write `schema/` files: core enums, need templates, capability sets, action registry +- [ ] Write 3-5 seed characters with full cross-referencing +- [ ] Write 2-3 institutions (bakery, school, household) +- [ ] Write example life arcs, schedules, behavior trees +- [ ] Write example relationships, conditions, locations +- [ ] Validate the whole thing end-to-end + +**Deliverable:** A complete, small storybook that exercises every language feature. Proves the grammar works for real content. 
+ +### Phase 5: Editor MVP (3-4 weeks) + +- [ ] `eframe` app scaffolding with panel layout +- [ ] Character editor: form fields + prose editing +- [ ] Filesystem watching + live re-resolve +- [ ] Diagnostics panel with click-to-navigate +- [ ] Transparent git (auto-commit on save) +- [ ] Relationship map (force-directed graph, click to edit) +- [ ] Life arc editor (node graph) +- [ ] Schedule builder (timeline view) + +**Deliverable:** Lonni can create and edit characters, relationships, and schedules in a visual tool. Behavior trees and advanced constructs come in Phase 6. + +### Phase 6: Editor Completeness + Engine Integration (2-3 weeks) + +- [ ] Behavior tree visual editor (node graph) +- [ ] Location browser with tile map +- [ ] Cross-reference navigation (click a name → jump to its definition) +- [ ] GitHub integration: issues, PRs, comments via `octocrab` +- [ ] Entity cross-linking in issues (tag issues to characters/systems) +- [ ] Engine integration: `storybook` → Bevy ECS component mapping +- [ ] Hot-reload pipeline for development +- [ ] Compiled binary format for release builds + +**Deliverable:** Full pipeline from content authoring to running simulation. Lonni never has to leave the editor for project management. + +--- + +## 14. Risk Assessment + +| Risk | Likelihood | Impact | Mitigation | +|---|---|---|---| +| LALRPOP `>` ambiguity between BT sigil and comparison op | Medium | Low | Separate grammar productions for tree context vs expression context | +| Prose block lexer mode causes edge cases | Medium | Medium | Extensive test suite for weird prose content (contains `---`, contains `{`, etc.) 
| +| Override merge semantics are underspecified for deep nesting | High | Medium | Define precise merge rules in a spec doc before implementing; property-test the merge engine | +| Lonni finds the file-based workflow frustrating | Medium | High | Prioritize the editor (Phase 5); the CLI is the fallback, not the primary tool | +| Schema evolution breaks existing content | Low (early) | High (later) | Start with additive-only evolution; write migration tools if needed | +| Behavior tree syntax is too terse for Lonni | Low | Low | She'll use the visual editor; text syntax is for Sienna and git diffs | +| 500-entity storybook is slow to resolve | Low | Medium | Profile early; resolution is O(n) in definitions, should be fine for thousands of files | +| Expression language needs arithmetic eventually | Medium | Medium | Design the grammar to be extensible; add operators later if needed | + +--- + +## 15. Summary + +Storybook is a domain-specific language designed around the specific data shapes of Aspen's agent simulation: state machines for life arcs, hierarchical trees for behavior, temporal blocks for schedules, graph structures for relationships, cross-cutting modifiers for conditions, and prose for the narrative soul of the world. + +The cross-referencing system uses filesystem-derived namespaces, `use` imports, qualified paths, bidirectional `link` instantiation, and structural `override` merging. Everything resolves in a multi-pass pipeline that produces rich error messages when things go wrong. + +The implementation is four Rust crates: syntax (parser), resolve (semantics), storybook (public API), and editor (egui tool). The engine consumes the same crate, ensuring the content pipeline is unified from authoring to runtime. + +Lonni gets a visual editor that hides the grammar behind forms, node graphs, and timeline views. Sienna gets a powerful text format that diffs well in git, validates in CI, and maps directly to the simulation architecture. 
The village gets to live. diff --git a/lefthook.yml b/lefthook.yml new file mode 100644 index 0000000..6b56b36 --- /dev/null +++ b/lefthook.yml @@ -0,0 +1,52 @@ +# lefthook.yml - Git hooks configuration +# https://github.com/evilmartians/lefthook + +# Validate commit messages follow conventional commits format +commit-msg: + commands: + conventional-commit: + run: | + commit_msg=$(cat {1}) + + # Conventional commit pattern + pattern="^(feat|fix|docs|style|refactor|perf|test|chore|build|ci|revert)(\(.+\))?: .{1,}" + + if ! echo "$commit_msg" | grep -qE "$pattern"; then + echo "❌ Invalid commit message format!" + echo "" + echo "Commit message must follow conventional commits:" + echo " (): " + echo "" + echo "Types: feat, fix, docs, style, refactor, perf, test, chore, build, ci, revert" + echo "" + echo "Example:" + echo " feat(auth): add user login endpoint" + echo " fix(ui): resolve button alignment issue" + echo " docs: update README with installation steps" + echo "" + echo "Your message:" + echo " $commit_msg" + exit 1 + fi + +# Run checks before commit +pre-commit: + parallel: true + commands: + fmt: + glob: "*.rs" + run: cargo fmt --check + stage_fixed: true + + clippy: + glob: "*.rs" + run: cargo clippy --workspace --all-targets -- -D warnings + + trailing-whitespace: + glob: "*.{rs,toml,md,yml,yaml}" + run: | + if grep -n '[[:space:]]$' {staged_files}; then + echo "❌ Found trailing whitespace in staged files" + exit 1 + fi + diff --git a/proptest-regressions/resolve/convert_prop_tests.txt b/proptest-regressions/resolve/convert_prop_tests.txt new file mode 100644 index 0000000..291a672 --- /dev/null +++ b/proptest-regressions/resolve/convert_prop_tests.txt @@ -0,0 +1,10 @@ +# Seeds for failure cases proptest has generated in the past. It is +# automatically read and these particular cases re-run before any +# novel cases are generated. 
+# +# It is recommended to check this file in to source control so that +# everyone who runs the test benefits from these saved cases. +cc a644ac460409a340ff5c7dbcd81d70af69fae9377cb341737664abfc6a40af59 # shrinks to name = "a", field_name = "on", string_val = "" +cc 97f6965a47e710d070b9ab3eb8c9151d2b0eaf511a83a5b2fe51e6d39c3e1dc8 # shrinks to characters = [Character { name: "a", fields: [Field { name: "on", value: Int(0), span: Span { start: 0, end: 10 } }], template: None, span: Span { start: 0, end: 100 } }], enums = [] +cc 1ebabf7786f4c8470a5fc7205572ce1f628372efa9c8642ce66ba2b3f1cae43b # shrinks to characters = [Character { name: "k", fields: [], template: None, span: Span { start: 0, end: 100 } }], enums = [EnumDecl { name: "k", variants: ["_"], span: Span { start: 0, end: 100 } }] +cc 8e01a6f55648de32d55edeade86fff607f0080045ea6155343a0400ca7a7140e # shrinks to characters = [Character { name: "_", fields: [], template: None, span: Span { start: 0, end: 100 } }, Character { name: "_", fields: [], template: None, span: Span { start: 0, end: 100 } }], use_count = 0 diff --git a/proptest-regressions/resolve/links_prop_tests.txt b/proptest-regressions/resolve/links_prop_tests.txt new file mode 100644 index 0000000..433f645 --- /dev/null +++ b/proptest-regressions/resolve/links_prop_tests.txt @@ -0,0 +1,7 @@ +# Seeds for failure cases proptest has generated in the past. It is +# automatically read and these particular cases re-run before any +# novel cases are generated. +# +# It is recommended to check this file in to source control so that +# everyone who runs the test benefits from these saved cases. 
+cc f001e5854731fe83d1c75e58f0519043c413356fcad528eb53a5337077bcfafa # shrinks to (rel1, rel2) = (Relationship { name: "a", participants: [Participant { role: None, name: ["_"], self_block: Some([]), other_block: None, span: Span { start: 0, end: 10 } }, Participant { role: None, name: ["a"], self_block: None, other_block: None, span: Span { start: 0, end: 10 } }], fields: [Field { name: "_", value: Int(0), span: Span { start: 0, end: 10 } }, Field { name: "_", value: Int(1), span: Span { start: 0, end: 10 } }], span: Span { start: 0, end: 10 } }, Relationship { name: "a", participants: [Participant { role: None, name: ["a"], self_block: Some([]), other_block: None, span: Span { start: 20, end: 30 } }, Participant { role: None, name: ["_"], self_block: None, other_block: None, span: Span { start: 20, end: 30 } }], fields: [Field { name: "_", value: Int(0), span: Span { start: 0, end: 10 } }, Field { name: "_", value: Int(1), span: Span { start: 0, end: 10 } }], span: Span { start: 20, end: 30 } }) diff --git a/proptest-regressions/resolve/validate_prop_tests.txt b/proptest-regressions/resolve/validate_prop_tests.txt new file mode 100644 index 0000000..5538cfe --- /dev/null +++ b/proptest-regressions/resolve/validate_prop_tests.txt @@ -0,0 +1,7 @@ +# Seeds for failure cases proptest has generated in the past. It is +# automatically read and these particular cases re-run before any +# novel cases are generated. +# +# It is recommended to check this file in to source control so that +# everyone who runs the test benefits from these saved cases. +cc 5ee477ca84df23fbe1a1ecfd0b8b9216539ca7a2f766e8a82f86de31878015dd # shrinks to tree_name = "aaa", action_name = "aaa" diff --git a/proptest-regressions/syntax/prop_tests.txt b/proptest-regressions/syntax/prop_tests.txt new file mode 100644 index 0000000..72ac4cf --- /dev/null +++ b/proptest-regressions/syntax/prop_tests.txt @@ -0,0 +1,8 @@ +# Seeds for failure cases proptest has generated in the past. 
It is +# automatically read and these particular cases re-run before any +# novel cases are generated. +# +# It is recommended to check this file in to source control so that +# everyone who runs the test benefits from these saved cases. +cc 8ac445fa78ef3f5ec7fb7d096cbe589988a9478352f82cdac195f5cea57ec47a # shrinks to name = "A", tag = "A", content = "\n¡" +cc 739a6de85e6f514f93fc2d077e929658b31c65294dd44b192972ed882a42171a # shrinks to name = "A", tag = "in", content = "" diff --git a/rustfmt.toml b/rustfmt.toml new file mode 100644 index 0000000..4043bce --- /dev/null +++ b/rustfmt.toml @@ -0,0 +1,24 @@ +binop_separator = "Back" +brace_style = "PreferSameLine" +control_brace_style = "AlwaysSameLine" +comment_width = 80 +edition = "2021" +enum_discrim_align_threshold = 40 +fn_params_layout = "Tall" +fn_single_line = false +force_explicit_abi = true +force_multiline_blocks = false +format_code_in_doc_comments = true +format_macro_matchers = true +format_macro_bodies = true +hex_literal_case = "Lower" +imports_indent = "Block" +imports_layout = "Vertical" +match_arm_leading_pipes = "Always" +match_block_trailing_comma = true +imports_granularity = "Crate" +normalize_doc_attributes = true +reorder_impl_items = true +reorder_imports = true +group_imports = "StdExternalCrate" +wrap_comments = true \ No newline at end of file diff --git a/src/bin/sb.rs b/src/bin/sb.rs new file mode 100644 index 0000000..feb51e8 --- /dev/null +++ b/src/bin/sb.rs @@ -0,0 +1,188 @@ +//! Storybook CLI tool +//! +//! Commands: +//! - `sb validate ` - Parse and validate entire project +//! - `sb inspect ` - Show fully resolved entity details +//! 
- `sb watch ` - Continuous validation on file changes + +use std::path::PathBuf; + +use clap::{ + Parser, + Subcommand, +}; +use miette::{ + IntoDiagnostic, + Result, +}; +use storybook::Project; + +#[derive(Parser)] +#[command(name = "sb")] +#[command(about = "Storybook DSL tool", long_about = None)] +struct Cli { + #[command(subcommand)] + command: Commands, +} + +#[derive(Subcommand)] +enum Commands { + /// Validate a storybook project or file + Validate { + /// Path to a .sb file or directory containing .sb files + #[arg(default_value = ".")] + path: PathBuf, + }, + + /// Inspect a specific entity + Inspect { + /// Entity name to inspect + name: String, + + /// Path to the storybook project directory + #[arg(short, long, default_value = ".")] + path: PathBuf, + }, + + /// Watch a project for changes and re-validate + Watch { + /// Path to the storybook project directory + #[arg(default_value = ".")] + path: PathBuf, + }, +} + +fn main() -> Result<()> { + let cli = Cli::parse(); + + match cli.command { + | Commands::Validate { path } => validate(&path)?, + | Commands::Inspect { name, path } => inspect(&name, &path)?, + | Commands::Watch { path } => watch(&path)?, + } + + Ok(()) +} + +fn validate(path: &PathBuf) -> Result<()> { + println!("Validating storybook at: {}", path.display()); + + let project = Project::load(path)?; + + let char_count = project.characters().count(); + let rel_count = project.relationships().count(); + let inst_count = project.institutions().count(); + let sched_count = project.schedules().count(); + let behavior_count = project.behaviors().count(); + let arc_count = project.life_arcs().count(); + + println!("✓ Validation successful!"); + println!(); + println!("Project contents:"); + println!(" Characters: {}", char_count); + println!(" Relationships: {}", rel_count); + println!(" Institutions: {}", inst_count); + println!(" Schedules: {}", sched_count); + println!(" Behaviors: {}", behavior_count); + println!(" Life Arcs: {}", 
arc_count); + + Ok(()) +} + +fn inspect(name: &str, path: &PathBuf) -> Result<()> { + println!("Loading project from: {}", path.display()); + + let project = Project::load(path)?; + + // Try to find the entity as different types + if let Some(character) = project.find_character(name) { + println!("Character: {}", character.name); + println!("Fields:"); + for (field_name, value) in &character.fields { + println!(" {}: {:?}", field_name, value); + } + println!("Prose blocks:"); + for (tag, prose) in &character.prose_blocks { + println!(" ---{}", tag); + println!("{}", prose.content); + println!(" ---"); + } + return Ok(()); + } + + if let Some(relationship) = project.find_relationship(name) { + println!("Relationship: {}", relationship.name); + println!("Participants:"); + for participant in &relationship.participants { + println!(" {}", participant.name.join("::")); + } + println!("Fields:"); + for (field_name, value) in &relationship.fields { + println!(" {}: {:?}", field_name, value); + } + return Ok(()); + } + + if let Some(institution) = project.find_institution(name) { + println!("Institution: {}", institution.name); + println!("Fields:"); + for (field_name, value) in &institution.fields { + println!(" {}: {:?}", field_name, value); + } + return Ok(()); + } + + println!("Entity '{}' not found in project", name); + Ok(()) +} + +fn watch(path: &PathBuf) -> Result<()> { + use std::sync::mpsc::channel; + + use notify::{ + Event, + EventKind, + RecursiveMode, + Watcher, + }; + + println!("Watching for changes in: {}", path.display()); + println!("Press Ctrl+C to stop"); + println!(); + + // Initial validation + match Project::load(path) { + | Ok(_) => println!("✓ Initial validation successful"), + | Err(e) => println!("✗ Initial validation failed: {}", e), + } + + let (tx, rx) = channel::>(); + + let mut watcher = notify::recommended_watcher(tx).into_diagnostic()?; + watcher + .watch(path, RecursiveMode::Recursive) + .into_diagnostic()?; + + for res in rx { + match 
res { + | Ok(event) => { + // Only re-validate on write events for .sb files + if matches!(event.kind, EventKind::Modify(_) | EventKind::Create(_)) && + event + .paths + .iter() + .any(|p| p.extension().and_then(|s| s.to_str()) == Some("sb")) + { + println!("\n--- Change detected, re-validating... ---"); + match Project::load(path) { + | Ok(_) => println!("✓ Validation successful"), + | Err(e) => println!("✗ Validation failed: {}", e), + } + } + }, + | Err(e) => println!("Watch error: {:?}", e), + } + } + + Ok(()) +} diff --git a/src/error_showcase_tests.rs b/src/error_showcase_tests.rs new file mode 100644 index 0000000..61d776b --- /dev/null +++ b/src/error_showcase_tests.rs @@ -0,0 +1,506 @@ +//! Functional tests that showcase every error type with its helpful message +//! +//! These tests are designed to: +//! 1. Ensure every error type can be triggered +//! 2. Document what causes each error +//! 3. Verify that error messages are helpful and clear + +use std::collections::HashSet; + +use crate::{ + resolve::{ + convert::convert_file, + names::NameTable, + validate::{ + validate_behavior_tree_actions, + validate_life_arc_transitions, + validate_relationship_bonds, + validate_schedule_overlaps, + validate_trait_ranges, + }, + ErrorCollector, + ResolveError, + }, + syntax::{ + ast::*, + lexer::Lexer, + FileParser, + }, + Project, +}; + +// ===== Parse Errors ===== + +#[test] +fn test_unexpected_token_error() { + let source = r#" + character Martha { + age 34 + } + "#; + // Missing colon after 'age' - should trigger UnexpectedToken + + let lexer = Lexer::new(source); + let result = FileParser::new().parse(lexer); + + assert!(result.is_err(), "Should fail with unexpected token"); + println!("\n=== UnexpectedToken Error ==="); + if let Err(e) = result { + println!("{:?}", e); + } +} + +#[test] +fn test_unexpected_eof_error() { + let source = r#" + character Martha { + age: 34 + "#; + // Missing closing brace - should trigger UnexpectedEof + + let lexer = 
Lexer::new(source); + let result = FileParser::new().parse(lexer); + + assert!(result.is_err(), "Should fail with unexpected EOF"); + println!("\n=== UnexpectedEof Error ==="); + if let Err(e) = result { + println!("{:?}", e); + } +} + +#[test] +fn test_invalid_token_error() { + let source = "character Martha { age: @#$ }"; + // Invalid character sequence - should trigger InvalidToken + + let lexer = Lexer::new(source); + let result = FileParser::new().parse(lexer); + + assert!(result.is_err(), "Should fail with invalid token"); + println!("\n=== InvalidToken Error ==="); + if let Err(e) = result { + println!("{:?}", e); + } +} + +#[test] +fn test_unclosed_prose_block_error() { + let source = r#" + character Martha { + backstory: ---backstory +This is Martha's backstory. +It goes on and on... +But it never closes! + } + "#; + // Prose block never closed - should trigger UnclosedProseBlock + + let lexer = Lexer::new(source); + let result = FileParser::new().parse(lexer); + + assert!(result.is_err(), "Should fail with unclosed prose block"); + println!("\n=== UnclosedProseBlock Error ==="); + if let Err(e) = result { + println!("{:?}", e); + } +} + +// ===== Resolution Errors ===== + +#[test] +fn test_name_not_found_error() { + let file = File { + declarations: vec![], + }; + + let table = NameTable::from_file(&file).unwrap(); + let result = table.lookup(&["NonExistent".to_string()]); + + assert!(result.is_none(), "Should not find non-existent name"); + + // Create the actual error + let error = ResolveError::NameNotFound { + name: "NonExistent".to_string(), + suggestion: table.find_suggestion("NonExistent"), + }; + + println!("\n=== NameNotFound Error ==="); + println!("{:?}", error); +} + +#[test] +fn test_duplicate_definition_error() { + let file = File { + declarations: vec![ + Declaration::Character(Character { + name: "Martha".to_string(), + fields: vec![], + template: None, + span: Span::new(0, 10), + }), + Declaration::Character(Character { + name: 
"Martha".to_string(), + fields: vec![], + template: None, + span: Span::new(20, 30), + }), + ], + }; + + let result = NameTable::from_file(&file); + + assert!(result.is_err(), "Should fail with duplicate definition"); + println!("\n=== DuplicateDefinition Error ==="); + if let Err(e) = result { + println!("{:?}", e); + } +} + +#[test] +fn test_circular_dependency_error() { + // Manually create a circular dependency error for demonstration + let error = ResolveError::CircularDependency { + cycle: "Template A -> Template B -> Template A".to_string(), + }; + + println!("\n=== CircularDependency Error ==="); + println!("{:?}", error); +} + +#[test] +fn test_invalid_field_access_error() { + let error = ResolveError::InvalidFieldAccess { + message: "Field 'nonexistent' does not exist on character 'Martha'".to_string(), + }; + + println!("\n=== InvalidFieldAccess Error ==="); + println!("{:?}", error); +} + +#[test] +fn test_type_mismatch_error() { + let error = ResolveError::TypeMismatch { + message: "Expected number for field 'age', but got string \"thirty\"".to_string(), + }; + + println!("\n=== TypeMismatch Error ==="); + println!("{:?}", error); +} + +#[test] +fn test_validation_error_generic() { + let error = ResolveError::ValidationError { + message: "Cannot append field 'age': field already exists".to_string(), + help: Some("The 'append' operation is used to add new fields. 
Use 'set' to update existing fields.".to_string()), + }; + + println!("\n=== ValidationError Error ==="); + println!("{:?}", error); +} + +// ===== Validation Errors ===== + +#[test] +fn test_unknown_life_arc_state_error() { + let life_arc = LifeArc { + name: "Growth".to_string(), + states: vec![ + ArcState { + name: "child".to_string(), + transitions: vec![Transition { + to: "adult".to_string(), // 'adult' exists + condition: Expr::BoolLit(true), + span: Span::new(0, 10), + }], + span: Span::new(0, 50), + }, + ArcState { + name: "adult".to_string(), + transitions: vec![Transition { + to: "senior".to_string(), // 'senior' doesn't exist! + condition: Expr::BoolLit(true), + span: Span::new(50, 60), + }], + span: Span::new(50, 100), + }, + ], + span: Span::new(0, 100), + }; + + let mut collector = ErrorCollector::new(); + validate_life_arc_transitions(&life_arc, &mut collector); + + assert!(collector.has_errors(), "Should fail with unknown state"); + println!("\n=== UnknownLifeArcState Error ==="); + if collector.has_errors() { + let result = collector.into_result(()); + if let Err(e) = result { + println!("{:?}", e); + } + } +} + +#[test] +fn test_trait_out_of_range_error_bond() { + let fields = vec![Field { + name: "bond".to_string(), + value: Value::Float(1.5), // Out of range! + span: Span::new(0, 10), + }]; + + let mut collector = ErrorCollector::new(); + validate_trait_ranges(&fields, &mut collector); + + assert!( + collector.has_errors(), + "Should fail with out of range trait" + ); + println!("\n=== TraitOutOfRange Error (bond too high) ==="); + if collector.has_errors() { + let result = collector.into_result(()); + if let Err(e) = result { + println!("{:?}", e); + } + } +} + +#[test] +fn test_trait_out_of_range_error_age() { + let fields = vec![Field { + name: "age".to_string(), + value: Value::Int(200), // Out of range! 
+ span: Span::new(0, 10), + }]; + + let mut collector = ErrorCollector::new(); + validate_trait_ranges(&fields, &mut collector); + + assert!(collector.has_errors(), "Should fail with out of range age"); + println!("\n=== TraitOutOfRange Error (age too high) ==="); + if collector.has_errors() { + let result = collector.into_result(()); + if let Err(e) = result { + println!("{:?}", e); + } + } +} + +#[test] +fn test_trait_out_of_range_negative() { + let fields = vec![Field { + name: "trust".to_string(), + value: Value::Float(-0.2), // Negative! + span: Span::new(0, 10), + }]; + + let mut collector = ErrorCollector::new(); + validate_trait_ranges(&fields, &mut collector); + + assert!(collector.has_errors(), "Should fail with negative trait"); + println!("\n=== TraitOutOfRange Error (negative value) ==="); + if collector.has_errors() { + let result = collector.into_result(()); + if let Err(e) = result { + println!("{:?}", e); + } + } +} + +#[test] +fn test_schedule_overlap_error() { + let schedule = Schedule { + name: "DailyRoutine".to_string(), + blocks: vec![ + ScheduleBlock { + activity: "work".to_string(), + start: Time { + hour: 8, + minute: 0, + second: 0, + }, + end: Time { + hour: 12, + minute: 30, + second: 0, + }, + span: Span::new(0, 50), + }, + ScheduleBlock { + activity: "lunch".to_string(), + start: Time { + hour: 12, + minute: 0, // Overlaps with work! 
                    second: 0,
                },
                end: Time {
                    hour: 13,
                    minute: 0,
                    second: 0,
                },
                span: Span::new(50, 100),
            },
        ],
        span: Span::new(0, 100),
    };

    let mut collector = ErrorCollector::new();
    validate_schedule_overlaps(&schedule, &mut collector);

    assert!(collector.has_errors(), "Should fail with schedule overlap");
    println!("\n=== ScheduleOverlap Error ===");
    if collector.has_errors() {
        let result = collector.into_result(());
        if let Err(e) = result {
            println!("{:?}", e);
        }
    }
}

/// Behavior trees may only reference actions present in the action registry;
/// an unregistered action name must be reported.
#[test]
fn test_unknown_behavior_action_error() {
    let tree = Behavior {
        name: "WorkDay".to_string(),
        root: BehaviorNode::Action("unknown_action".to_string(), vec![]),
        span: Span::new(0, 50),
    };

    // Create a registry with some known actions (but not "unknown_action")
    let mut action_registry = HashSet::new();
    action_registry.insert("walk".to_string());
    action_registry.insert("work".to_string());
    action_registry.insert("eat".to_string());

    let mut collector = ErrorCollector::new();
    validate_behavior_tree_actions(&tree, &action_registry, &mut collector);

    assert!(collector.has_errors(), "Should fail with unknown action");
    println!("\n=== UnknownBehaviorAction Error ===");
    if collector.has_errors() {
        let result = collector.into_result(());
        if let Err(e) = result {
            println!("{:?}", e);
        }
    }
}

/// A relationship `bond` far outside the valid range must be rejected by
/// `validate_relationship_bonds`.
#[test]
fn test_relationship_bond_out_of_range() {
    let relationship = Relationship {
        name: "Test".to_string(),
        participants: vec![],
        fields: vec![Field {
            name: "bond".to_string(),
            value: Value::Float(2.5), // Way out of range!
            span: Span::new(0, 10),
        }],
        span: Span::new(0, 50),
    };

    let mut collector = ErrorCollector::new();
    validate_relationship_bonds(&[relationship], &mut collector);

    assert!(collector.has_errors(), "Should fail with bond out of range");
    println!("\n=== Relationship Bond Out of Range ===");
    if collector.has_errors() {
        let result = collector.into_result(());
        if let Err(e) = result {
            println!("{:?}", e);
        }
    }
}

/// Duplicate field names are caught during AST-to-resolved conversion, not
/// only during validation.
#[test]
fn test_duplicate_field_in_convert() {
    let character = Character {
        name: "Martha".to_string(),
        fields: vec![
            Field {
                name: "age".to_string(),
                value: Value::Int(34),
                span: Span::new(0, 10),
            },
            Field {
                name: "age".to_string(), // Duplicate!
                value: Value::Int(35),
                span: Span::new(10, 20),
            },
        ],
        template: None,
        span: Span::new(0, 50),
    };

    let file = File {
        declarations: vec![Declaration::Character(character)],
    };

    let result = convert_file(&file);

    assert!(result.is_err(), "Should fail with duplicate field");
    println!("\n=== Duplicate Field Error (in conversion) ===");
    if let Err(e) = result {
        println!("{:?}", e);
    }
}

// ===== Project Errors =====

#[test]
fn test_invalid_project_structure_no_directory() {
    let result = Project::load("/nonexistent/path/to/project");

    assert!(result.is_err(), "Should fail with invalid structure");
    println!("\n=== InvalidStructure Error (directory doesn't exist) ===");
    if let Err(e) = result {
        println!("{:?}", e);
    }
}

#[test]
fn test_invalid_project_structure_not_directory() {
    // Try to load a file as if it were a directory
    // NOTE(review): this resolves relative to the test process's working
    // directory and assumes `cargo test` runs from the crate root where
    // Cargo.toml exists — consider using env!("CARGO_MANIFEST_DIR") instead.
    let result = Project::load("Cargo.toml");

    assert!(result.is_err(), "Should fail - file not directory");
    println!("\n=== InvalidStructure Error (not a directory) ===");
    if let Err(e) = result {
        println!("{:?}", e);
    }
}

// ===== Showcase All Errors =====

/// Runs every error test in sequence so a human can eyeball all diagnostic
/// output in one pass. Ignored by default because it is purely visual.
#[test]
#[ignore] // Run with: cargo test error_showcase -- --ignored --nocapture
fn error_showcase_all() {
    println!("\n\n");
    println!("╔════════════════════════════════════════════════════════════════╗");
    println!("║           STORYBOOK ERROR MESSAGES SHOWCASE                    ║");
    println!("║       Every error type with helpful hints for users            ║");
    println!("╚════════════════════════════════════════════════════════════════╝");

    test_unexpected_token_error();
    test_unexpected_eof_error();
    test_invalid_token_error();
    test_unclosed_prose_block_error();
    test_name_not_found_error();
    test_duplicate_definition_error();
    test_circular_dependency_error();
    test_invalid_field_access_error();
    test_type_mismatch_error();
    test_validation_error_generic();
    test_unknown_life_arc_state_error();
    test_trait_out_of_range_error_bond();
    test_trait_out_of_range_error_age();
    test_trait_out_of_range_negative();
    test_schedule_overlap_error();
    test_unknown_behavior_action_error();
    test_relationship_bond_out_of_range();
    test_duplicate_field_in_convert();
    test_invalid_project_structure_no_directory();
    test_invalid_project_structure_not_directory();

    println!("\n\n");
    println!("╔════════════════════════════════════════════════════════════════╗");
    println!("║                     SHOWCASE COMPLETE                          ║");
    println!("╚════════════════════════════════════════════════════════════════╝");
}
diff --git a/src/lib.rs b/src/lib.rs
new file mode 100644
index 0000000..74e0145
--- /dev/null
+++ b/src/lib.rs
@@ -0,0 +1,418 @@
//! Storybook - A DSL for authoring narrative content for agent simulations
//!
//! This library provides parsing, resolution, and validation for `.sb` files.
//!
//! # Example
//!
//! ```no_run
//! use storybook::{
//!     query::CharacterQuery,
//!     Project,
//! };
//!
//! // Load and validate a storybook project
//! let project = Project::load("path/to/storybook")?;
//!
//! // Query for characters
//! for character in project.characters() {
//!     println!("Character: {}", character.name);
//! }
//!
//! // Filter characters by age and traits
//! for character in project
//!     .characters()
//!     .with_age_range(25, 40)
//!     .with_trait("trust", 0.7, 1.0)
//! {
//!     println!("Trusted character: {}", character.name);
//! }
//!
//! # Ok::<(), Box<dyn std::error::Error>>(())
//! ```

// Suppress false positive warnings from thiserror macro
#![allow(unused_assignments)]

pub mod query;
pub mod resolve;
pub mod syntax;
pub mod types;

#[cfg(test)]
mod error_showcase_tests;

use std::{
    collections::HashSet,
    path::{
        Path,
        PathBuf,
    },
};

use miette::Diagnostic;
pub use resolve::{
    NameTable,
    QualifiedPath,
};
use thiserror::Error;
pub use types::*;

use crate::{
    resolve::validate,
    syntax::{
        ast::File,
        FileParser,
    },
};

/// Errors that can occur when working with projects
#[derive(Error, Debug, Diagnostic)]
pub enum ProjectError {
    /// A `.sb` file failed to parse.
    ///
    /// NOTE(review): `parse_file` currently wraps the LALRPOP parse error in a
    /// `std::io::Error` with a `{:?}`-formatted message, which discards span
    /// information miette could otherwise render — consider a dedicated
    /// source-error type. TODO confirm against `syntax::FileParser`'s error type.
    #[error("Failed to parse file: {path}")]
    #[diagnostic(help("There's a syntax error in this file. Check the error details above for the specific issue. Common problems: missing braces, unclosed strings, incorrect syntax for declarations."))]
    #[allow(dead_code)]
    ParseError {
        path: String,
        #[source]
        source: Box<dyn std::error::Error + Send + Sync>,
    },

    /// Name resolution or semantic validation failed; forwarded verbatim.
    #[error(transparent)]
    #[diagnostic(transparent)]
    ResolveError(#[from] resolve::ResolveError),

    /// Filesystem-level failure while reading project files.
    #[error("IO error reading files: {0}")]
    #[diagnostic(help("There was a problem reading or writing files. Check that: the file exists, you have permission to read it, the path is correct, and there's enough disk space."))]
    IoError(#[from] std::io::Error),

    /// The path passed to `Project::load` is not a usable project layout.
    #[error("Invalid project structure: {0}")]
    #[diagnostic(help("The project directory structure is not valid. Make sure: the path exists and is a directory, it contains at least one .sb file, you have permission to access it."))]
    InvalidStructure(String),
}

/// Result type for project operations
pub type Result<T> = std::result::Result<T, ProjectError>;

/// A loaded and validated Storybook project
#[derive(Debug, Clone)]
pub struct Project {
    /// Root path of the project (either a file or directory)
    pub root: PathBuf,
    /// All resolved files in the project
    pub files: Vec<ResolvedFile>,
    /// Combined name table across all files
    pub name_table: NameTable,
}

impl Project {
    /// Load a storybook project from a file or directory
    ///
    /// This will:
    /// 1. Find all `.sb` files (or load the single file if path is a file)
    /// 2. Parse each file
    /// 3. Build a combined name table
    /// 4. Resolve all cross-references
    /// 5. Validate semantic constraints
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - The path doesn't exist
    /// - Any `.sb` file fails to parse
    /// - Name resolution fails (undefined references, duplicates, etc.)
    /// - Semantic validation fails (invalid ranges, overlaps, etc.)
    pub fn load<P: AsRef<Path>>(root: P) -> Result<Self> {
        let root = root.as_ref().to_path_buf();

        if !root.exists() {
            return Err(ProjectError::InvalidStructure(format!(
                "Path does not exist: {}",
                root.display()
            )));
        }

        // Handle both single files and directories
        let sb_files = if root.is_file() {
            // Single file - validate it's a .sb file
            if root.extension().and_then(|s| s.to_str()) != Some("sb") {
                return Err(ProjectError::InvalidStructure(format!(
                    "File must have .sb extension: {}",
                    root.display()
                )));
            }
            vec![root.clone()]
        } else if root.is_dir() {
            // Directory - find all .sb files
            Self::find_sb_files(&root)?
        } else {
            // exists() was true but it is neither a file nor a directory
            // (e.g. a broken symlink or special file)
            return Err(ProjectError::InvalidStructure(format!(
                "Path is neither a file nor a directory: {}",
                root.display()
            )));
        };

        if sb_files.is_empty() {
            return Err(ProjectError::InvalidStructure(
                "No .sb files found in project".to_string(),
            ));
        }

        // Parse all files
        let mut parsed_files = Vec::new();
        for path in &sb_files {
            let file = Self::parse_file(path)?;
            parsed_files.push(file);
        }

        // Build combined name table from all files
        let name_table = NameTable::from_files(&parsed_files)?;

        // Build action registry from schema if it exists
        let action_registry = Self::build_action_registry(&root)?;

        // Validate and convert all files
        let mut resolved_files = Vec::new();

        for file in parsed_files {
            // First validate
            validate::validate_file(&file, &action_registry)?;

            // Then convert AST to resolved types
            let declarations = resolve::convert::convert_file(&file)?;

            resolved_files.push(ResolvedFile { declarations });
        }

        Ok(Project {
            root,
            files: resolved_files,
            name_table,
        })
    }

    /// Parse a single .sb file
    fn parse_file(path: &Path) -> Result<File> {
        let content = std::fs::read_to_string(path)?;
        let lexer = syntax::lexer::Lexer::new(&content);

        FileParser::new()
            .parse(lexer)
            .map_err(|e| ProjectError::ParseError {
                path: path.display().to_string(),
                // Wrap the parser error in an io::Error so it can be boxed as
                // a generic source error.
                source: Box::new(std::io::Error::new(
                    std::io::ErrorKind::InvalidData,
                    format!("Parse error: {:?}", e),
                )),
            })
    }

    /// Find all .sb files in a directory recursively
    ///
    /// NOTE(review): `filter_map(|e| e.ok())` silently drops unreadable
    /// entries (permission errors, broken symlinks, symlink cycles reported
    /// by walkdir), so a partially unreadable project loads as if those files
    /// did not exist — consider propagating walkdir errors instead.
    fn find_sb_files(root: &Path) -> Result<Vec<PathBuf>> {
        let mut files = Vec::new();

        for entry in walkdir::WalkDir::new(root)
            .follow_links(true)
            .into_iter()
            .filter_map(|e| e.ok())
        {
            let path = entry.path();
            if path.extension().and_then(|s| s.to_str()) == Some("sb") {
                files.push(path.to_path_buf());
            }
        }

        Ok(files)
    }

    /// Build action registry from schema files if they exist
    ///
    /// Looks for `schema/actions.sb` and extracts action names
    /// from `enum
    /// Action` declaration. If no schema exists, returns empty registry (no
    /// validation).
    ///
    /// # Future Tooling
    ///
    /// TODO: Add commands to help maintain the Action enum:
    /// - `sb actions check` - find actions used in behavior trees but not in
    ///   enum
    /// - `sb actions suggest` - auto-generate/update enum Action from usage
    /// - `sb actions unused` - find enum variants never used in behavior trees
    fn build_action_registry(root: &Path) -> Result<HashSet<String>> {
        let schema_path = if root.is_file() {
            // If root is a file, look for schema in parent directory
            root.parent().map(|p| p.join("schema").join("actions.sb"))
        } else {
            // If root is a directory, look for schema subdirectory
            Some(root.join("schema").join("actions.sb"))
        };

        let schema_path = match schema_path {
            | Some(path) if path.exists() => path,
            | _ => return Ok(HashSet::new()), // No schema, return empty registry
        };

        // Parse the schema file
        let file = Self::parse_file(&schema_path)?;

        // Find enum Action declaration and extract variants; other enums in
        // the schema file are intentionally ignored.
        let mut registry = HashSet::new();
        for decl in &file.declarations {
            if let syntax::ast::Declaration::Enum(enum_decl) = decl {
                if enum_decl.name == "Action" {
                    for variant in &enum_decl.variants {
                        registry.insert(variant.clone());
                    }
                }
            }
        }

        Ok(registry)
    }

    /// Get all characters across all files
    pub fn characters(&self) -> impl Iterator<Item = &ResolvedCharacter> {
        self.files.iter().flat_map(|f| f.characters())
    }

    /// Get all relationships across all files
    pub fn relationships(&self) -> impl Iterator<Item = &ResolvedRelationship> {
        self.files.iter().flat_map(|f| f.relationships())
    }

    /// Get all institutions across all files
    pub fn institutions(&self) -> impl Iterator<Item = &ResolvedInstitution> {
        self.files.iter().flat_map(|f| f.institutions())
    }

    /// Get all schedules across all files
    pub fn schedules(&self) -> impl Iterator<Item = &ResolvedSchedule> {
        self.files.iter().flat_map(|f| f.schedules())
    }

    /// Get all behavior trees across all files
    pub fn behaviors(&self) -> impl Iterator<Item = &ResolvedBehavior> {
        self.files.iter().flat_map(|f| f.behaviors())
    }

    /// Get all life arcs across all files
    pub fn life_arcs(&self) -> impl Iterator<Item = &ResolvedLifeArc> {
        self.files.iter().flat_map(|f| f.life_arcs())
    }

    /// Get all locations across all files
    pub fn locations(&self) -> impl Iterator<Item = &ResolvedLocation> {
        self.files.iter().flat_map(|f| f.locations())
    }

    /// Get all species across all files
    pub fn species(&self) -> impl Iterator<Item = &ResolvedSpecies> {
        self.files.iter().flat_map(|f| f.species())
    }

    /// Get all enums across all files
    pub fn enums(&self) -> impl Iterator<Item = &ResolvedEnum> {
        self.files.iter().flat_map(|f| f.enums())
    }

    /// Find a character by name (first match across files, linear scan)
    pub fn find_character(&self, name: &str) -> Option<&ResolvedCharacter> {
        self.characters().find(|c| c.name == name)
    }

    /// Find a relationship by name (first match across files, linear scan)
    pub fn find_relationship(&self, name: &str) -> Option<&ResolvedRelationship> {
        self.relationships().find(|r| r.name == name)
    }

    /// Find an institution by name (first match across files, linear scan)
    pub fn find_institution(&self, name: &str) -> Option<&ResolvedInstitution> {
        self.institutions().find(|i| i.name == name)
    }
}

#[cfg(test)]
mod tests {
    use std::fs;

    use tempfile::TempDir;

    use super::*;

    #[test]
    fn test_build_action_registry_no_schema() {
        let dir = TempDir::new().unwrap();

        let registry = Project::build_action_registry(dir.path()).unwrap();

        assert!(
            registry.is_empty(),
            "Registry should be empty when no schema exists"
        );
    }

    #[test]
    fn test_build_action_registry_with_schema() {
        let dir = TempDir::new().unwrap();
        let schema_dir = dir.path().join("schema");
        fs::create_dir(&schema_dir).unwrap();

        fs::write(
            schema_dir.join("actions.sb"),
            "enum Action { walk, work, eat, sleep }",
        )
        .unwrap();

        let registry = Project::build_action_registry(dir.path()).unwrap();

        assert_eq!(registry.len(), 4);
        assert!(registry.contains("walk"));
        assert!(registry.contains("work"));
        assert!(registry.contains("eat"));
        assert!(registry.contains("sleep"));
        assert!(!registry.contains("unknown"));
    }

    #[test]
    fn test_build_action_registry_from_file_path() {
        let dir = TempDir::new().unwrap();
        let schema_dir = dir.path().join("schema");
        fs::create_dir(&schema_dir).unwrap();

        fs::write(schema_dir.join("actions.sb"), "enum Action { walk, work }").unwrap();

        // Create a test .sb file in the directory
        let test_file = dir.path().join("test.sb");
        fs::write(&test_file, "character Test { age: 30 }").unwrap();

        // Pass the file path - should look for schema in parent directory
        let registry = Project::build_action_registry(&test_file).unwrap();

        assert_eq!(registry.len(), 2);
        assert!(registry.contains("walk"));
        assert!(registry.contains("work"));
    }

    #[test]
    fn test_build_action_registry_ignores_other_enums() {
        let dir = TempDir::new().unwrap();
        let schema_dir = dir.path().join("schema");
        fs::create_dir(&schema_dir).unwrap();

        fs::write(
            schema_dir.join("actions.sb"),
            r#"
enum Action { walk, work }
enum OtherEnum { foo, bar, baz }
            "#,
        )
        .unwrap();

        let registry = Project::build_action_registry(dir.path()).unwrap();

        // Should only contain Action enum variants
        assert_eq!(registry.len(), 2);
        assert!(registry.contains("walk"));
        assert!(registry.contains("work"));
        assert!(!registry.contains("foo"));
    }
}
diff --git a/src/main.rs b/src/main.rs
new file mode 100644
index 0000000..e7a11a9
--- /dev/null
+++ b/src/main.rs
@@ -0,0 +1,3 @@
// NOTE(review): the commit message advertises a CLI tool (validate, inspect,
// query commands), but this binary entry point is still a stub — confirm
// whether the CLI lives elsewhere or is yet to be wired up.
fn main() {
    println!("Hello, world!");
}
diff --git a/src/query.rs b/src/query.rs
new file mode 100644
index 0000000..1bdceda
--- /dev/null
+++ b/src/query.rs
@@ -0,0 +1,288 @@
//! Query interface for filtering and searching entities
//!
//! This module provides convenient methods for querying entities in a storybook
//! project. You can filter by various criteria like traits, age ranges, field
//! values, etc.
use crate::{
    syntax::ast::Value,
    types::*,
};

/// Extension methods for querying characters
pub trait CharacterQuery<'a> {
    /// Filter characters by age range (inclusive on both ends; characters
    /// without an integer `age` field are excluded)
    fn with_age_range(
        self,
        min: i64,
        max: i64,
    ) -> Box<dyn Iterator<Item = &'a ResolvedCharacter> + 'a>;

    /// Filter characters by trait value (inclusive range; characters without
    /// a float field of that name are excluded)
    fn with_trait(
        self,
        trait_name: &'a str,
        min: f64,
        max: f64,
    ) -> Box<dyn Iterator<Item = &'a ResolvedCharacter> + 'a>;

    /// Filter characters that have a specific field
    fn with_field(
        self,
        field_name: &'a str,
    ) -> Box<dyn Iterator<Item = &'a ResolvedCharacter> + 'a>;

    /// Filter characters by field value
    fn with_field_value(
        self,
        field_name: &'a str,
        value: Value,
    ) -> Box<dyn Iterator<Item = &'a ResolvedCharacter> + 'a>;
}

impl<'a, I> CharacterQuery<'a> for I
where
    I: Iterator<Item = &'a ResolvedCharacter> + 'a,
{
    fn with_age_range(
        self,
        min: i64,
        max: i64,
    ) -> Box<dyn Iterator<Item = &'a ResolvedCharacter> + 'a> {
        Box::new(self.filter(move |c| {
            if let Some(Value::Int(age)) = c.fields.get("age") {
                *age >= min && *age <= max
            } else {
                false
            }
        }))
    }

    fn with_trait(
        self,
        trait_name: &'a str,
        min: f64,
        max: f64,
    ) -> Box<dyn Iterator<Item = &'a ResolvedCharacter> + 'a> {
        Box::new(self.filter(move |c| {
            if let Some(Value::Float(value)) = c.fields.get(trait_name) {
                *value >= min && *value <= max
            } else {
                false
            }
        }))
    }

    fn with_field(
        self,
        field_name: &'a str,
    ) -> Box<dyn Iterator<Item = &'a ResolvedCharacter> + 'a> {
        Box::new(self.filter(move |c| c.fields.contains_key(field_name)))
    }

    fn with_field_value(
        self,
        field_name: &'a str,
        value: Value,
    ) -> Box<dyn Iterator<Item = &'a ResolvedCharacter> + 'a> {
        Box::new(self.filter(move |c| c.fields.get(field_name) == Some(&value)))
    }
}

/// Extension methods for querying relationships
pub trait RelationshipQuery<'a> {
    /// Filter relationships by bond strength (inclusive range on the `bond`
    /// float field)
    fn with_bond_range(
        self,
        min: f64,
        max: f64,
    ) -> Box<dyn Iterator<Item = &'a ResolvedRelationship> + 'a>;

    /// Filter relationships that include a specific participant
    fn with_participant(
        self,
        participant_name: &'a str,
    ) -> Box<dyn Iterator<Item = &'a ResolvedRelationship> + 'a>;

    /// Filter relationships that have a specific field
    fn with_field(
        self,
        field_name: &'a str,
    ) -> Box<dyn Iterator<Item = &'a ResolvedRelationship> + 'a>;
}

impl<'a, I> RelationshipQuery<'a> for I
where
    I: Iterator<Item = &'a ResolvedRelationship> + 'a,
{
    fn with_bond_range(
        self,
        min: f64,
        max: f64,
    ) -> Box<dyn Iterator<Item = &'a ResolvedRelationship> + 'a> {
        Box::new(self.filter(move |r| {
            if let Some(Value::Float(bond)) = r.fields.get("bond") {
                *bond >= min && *bond <= max
            } else {
                false
            }
        }))
    }

    fn with_participant(
        self,
        participant_name: &'a str,
    ) -> Box<dyn Iterator<Item = &'a ResolvedRelationship> + 'a> {
        // Matches on the last segment of each participant's qualified path.
        // NOTE(review): assumes the final path segment is the character's
        // simple name — confirm against how participants are resolved.
        Box::new(self.filter(move |r| {
            r.participants
                .iter()
                .any(|p| p.name.last().is_some_and(|name| name == participant_name))
        }))
    }

    fn with_field(
        self,
        field_name: &'a str,
    ) -> Box<dyn Iterator<Item = &'a ResolvedRelationship> + 'a> {
        Box::new(self.filter(move |r| r.fields.contains_key(field_name)))
    }
}

/// Extension methods for querying schedules
pub trait ScheduleQuery<'a> {
    /// Filter schedules that have an activity
    fn with_activity(
        self,
        activity: &'a str,
    ) -> Box<dyn Iterator<Item = &'a ResolvedSchedule> + 'a>;
}

impl<'a, I> ScheduleQuery<'a> for I
where
    I: Iterator<Item = &'a ResolvedSchedule> + 'a,
{
    fn with_activity(
        self,
        activity: &'a str,
    ) -> Box<dyn Iterator<Item = &'a ResolvedSchedule> + 'a> {
        Box::new(self.filter(move |s| s.blocks.iter().any(|block| block.activity == activity)))
    }
}

#[cfg(test)]
mod tests {
    use std::collections::HashMap;

    use super::*;
    use crate::syntax::ast::Span;

    /// Test fixture: a character with an `age` int field and a `trust` float
    /// field.
    fn make_character(name: &str, age: i64, trust: f64) -> ResolvedCharacter {
        let mut fields = HashMap::new();
        fields.insert("age".to_string(), Value::Int(age));
        fields.insert("trust".to_string(), Value::Float(trust));

        ResolvedCharacter {
            name: name.to_string(),
            fields,
            prose_blocks: HashMap::new(),
            span: Span::new(0, 10),
        }
    }

    #[test]
    fn test_filter_by_age_range() {
        let characters = [
            make_character("Alice", 25, 0.8),
            make_character("Bob", 35, 0.6),
            make_character("Charlie", 45, 0.9),
        ];

        let filtered: Vec<_> = characters.iter().with_age_range(30, 50).collect();

        assert_eq!(filtered.len(), 2);
        assert_eq!(filtered[0].name, "Bob");
        assert_eq!(filtered[1].name, "Charlie");
    }

    #[test]
    fn test_filter_by_trait() {
        let characters = [
            make_character("Alice", 25, 0.8),
            make_character("Bob", 35, 0.6),
            make_character("Charlie", 45, 0.9),
        ];

        let filtered: Vec<_> = characters.iter().with_trait("trust", 0.75, 1.0).collect();

        assert_eq!(filtered.len(), 2);
        assert_eq!(filtered[0].name, "Alice");
        assert_eq!(filtered[1].name, "Charlie");
    }

    #[test]
    fn test_chain_filters() {
        let characters = [
            make_character("Alice", 25, 0.8),
            make_character("Bob", 35, 0.6),
            make_character("Charlie", 45, 0.9),
            make_character("David", 40, 0.85),
        ];

        // Find characters aged 30-50 with trust > 0.8
        let filtered: Vec<_> = characters
            .iter()
            .with_age_range(30, 50)
            .with_trait("trust", 0.8, 1.0)
            .collect();

        assert_eq!(filtered.len(), 2);
        assert_eq!(filtered[0].name, "Charlie");
        assert_eq!(filtered[1].name, "David");
    }

    #[test]
    fn test_filter_with_field() {
        let mut char1 = make_character("Alice", 25, 0.8);
        char1
            .fields
            .insert("job".to_string(), Value::String("baker".to_string()));

        let char2 = make_character("Bob", 35, 0.6);

        let characters = [char1, char2];

        let filtered: Vec<_> = characters.iter().with_field("job").collect();

        assert_eq!(filtered.len(), 1);
        assert_eq!(filtered[0].name, "Alice");
    }

    #[test]
    fn test_relationship_with_bond_range() {
        let mut fields1 = HashMap::new();
        fields1.insert("bond".to_string(), Value::Float(0.9));

        let mut fields2 = HashMap::new();
        fields2.insert("bond".to_string(), Value::Float(0.5));

        let relationships = [
            ResolvedRelationship {
                name: "Strong".to_string(),
                participants: vec![],
                fields: fields1,
                span: Span::new(0, 10),
            },
            ResolvedRelationship {
                name: "Weak".to_string(),
                participants: vec![],
                fields: fields2,
                span: Span::new(0, 10),
            },
        ];

        let filtered: Vec<_> = relationships.iter().with_bond_range(0.8, 1.0).collect();

        assert_eq!(filtered.len(), 1);
        assert_eq!(filtered[0].name, "Strong");
    }
}
diff --git a/src/resolve/convert.rs b/src/resolve/convert.rs
new file mode 100644
index 0000000..be1241f
--- /dev/null
+++ b/src/resolve/convert.rs
@@ -0,0 +1,768 @@
//! Conversion from AST to resolved types
//!
//! This module handles converting parsed AST declarations into fully resolved
//! types that are ready for consumption by the game engine. It:
//! - Converts field vectors to HashMaps for efficient lookup
//! - Extracts prose blocks into separate collections
//! - Applies template overrides
//! - Validates that all references exist

use std::collections::HashMap;

use crate::{
    resolve::{
        merge,
        names::NameTable,
        validate,
        ErrorCollector,
        ResolveError,
        Result,
    },
    syntax::ast::{
        self,
        ProseBlock,
        Value,
    },
    types::*,
};

/// Convert a parsed file into resolved declarations
///
/// Kept as the stable entry point; it delegates to
/// `convert_file_with_templates`, so template composition IS applied here.
/// (The name predates template support.)
pub fn convert_file(file: &ast::File) -> Result<Vec<ResolvedDeclaration>> {
    // Use the template-aware version
    convert_file_with_templates(file)
}

/// Convert a parsed file into resolved declarations with template composition
/// support
pub fn convert_file_with_templates(file: &ast::File) -> Result<Vec<ResolvedDeclaration>> {
    // Build name table for template lookups
    let name_table = NameTable::from_file(file)?;
    let mut resolved = Vec::new();

    for decl in &file.declarations {
        match decl {
            | ast::Declaration::Character(c) => {
                // Use template-aware conversion
                let resolved_char =
                    convert_character_with_templates(c, &file.declarations, &name_table)?;
                resolved.push(ResolvedDeclaration::Character(resolved_char));
            },
            | ast::Declaration::Template(t) => {
                // Use include-aware conversion
                let resolved_template =
                    convert_template_with_includes(t, &file.declarations, &name_table)?;
                resolved.push(ResolvedDeclaration::Template(resolved_template));
            },
            | ast::Declaration::LifeArc(la) => {
                resolved.push(ResolvedDeclaration::LifeArc(convert_life_arc(la)?));
            },
            | ast::Declaration::Schedule(s) => {
                resolved.push(ResolvedDeclaration::Schedule(convert_schedule(s)?));
            },
            | ast::Declaration::Behavior(b) => {
                resolved.push(ResolvedDeclaration::Behavior(convert_behavior(b)?));
            },
            | ast::Declaration::Institution(i) => {
                resolved.push(ResolvedDeclaration::Institution(convert_institution(i)?));
            },
            | ast::Declaration::Relationship(r) => {
                resolved.push(ResolvedDeclaration::Relationship(convert_relationship(r)?));
            },
            | ast::Declaration::Location(l) => {
                resolved.push(ResolvedDeclaration::Location(convert_location(l)?));
            },
            | ast::Declaration::Species(s) => {
                resolved.push(ResolvedDeclaration::Species(convert_species(s)?));
            },
            | ast::Declaration::Enum(e) => {
                resolved.push(ResolvedDeclaration::Enum(convert_enum(e)?));
            },
            | ast::Declaration::Use(_) => {
                // Use declarations are handled during name resolution, not
                // conversion
            },
        }
    }

    Ok(resolved)
}

/// Convert character AST to resolved type (no template resolution)
pub fn convert_character(character: &ast::Character) -> Result<ResolvedCharacter> {
    let (fields, prose_blocks) = extract_fields_and_prose(&character.fields)?;

    Ok(ResolvedCharacter {
        name: character.name.clone(),
        fields,
        prose_blocks,
        span: character.span.clone(),
    })
}

/// Convert character AST to resolved type with template composition
///
/// This version resolves template inheritance by:
/// 1. Merging all templates listed in `from Template1, Template2`
/// 2. Recursively resolving template includes
/// 3. Validating strict mode requirements
/// 4. Applying character's own fields on top
pub fn convert_character_with_templates(
    character: &ast::Character,
    declarations: &[ast::Declaration],
    name_table: &NameTable,
) -> Result<ResolvedCharacter> {
    // Merge character templates if any; characters without a `from` clause
    // skip the merge entirely.
    let merged_fields = if character.template.is_some() {
        merge::merge_character_templates(character, declarations, name_table)?
    } else {
        character.fields.clone()
    };

    // Extract fields and prose blocks from merged result
    let (fields, prose_blocks) = extract_fields_and_prose(&merged_fields)?;

    Ok(ResolvedCharacter {
        name: character.name.clone(),
        fields,
        prose_blocks,
        span: character.span.clone(),
    })
}

/// Convert template AST to resolved type (no include resolution)
pub fn convert_template(template: &ast::Template) -> Result<ResolvedTemplate> {
    let (fields, _) = extract_fields_and_prose(&template.fields)?;

    Ok(ResolvedTemplate {
        name: template.name.clone(),
        fields,
        span: template.span.clone(),
    })
}

/// Convert template AST to resolved type with include resolution
///
/// This version resolves template includes by:
/// 1. Recursively resolving all included templates
/// 2. Merging included fields (later includes override earlier ones)
/// 3. Adding template's own fields on top
pub fn convert_template_with_includes(
    template: &ast::Template,
    declarations: &[ast::Declaration],
    name_table: &NameTable,
) -> Result<ResolvedTemplate> {
    // Resolve template includes if any; the visited set guards against
    // include cycles during recursion.
    let merged_fields = if !template.includes.is_empty() {
        let mut visited = std::collections::HashSet::new();
        merge::resolve_template_includes(template, declarations, name_table, &mut visited)?
    } else {
        template.fields.clone()
    };

    // Extract fields (templates don't have prose blocks)
    let (fields, _) = extract_fields_and_prose(&merged_fields)?;

    Ok(ResolvedTemplate {
        name: template.name.clone(),
        fields,
        span: template.span.clone(),
    })
}

/// Convert life arc AST to resolved type
pub fn convert_life_arc(life_arc: &ast::LifeArc) -> Result<ResolvedLifeArc> {
    let states = life_arc
        .states
        .iter()
        .map(|state| ResolvedArcState {
            name: state.name.clone(),
            transitions: state.transitions.clone(),
            span: state.span.clone(),
        })
        .collect();

    Ok(ResolvedLifeArc {
        name: life_arc.name.clone(),
        states,
        span: life_arc.span.clone(),
    })
}

/// Convert schedule AST to resolved type
pub fn convert_schedule(schedule: &ast::Schedule) -> Result<ResolvedSchedule> {
    let blocks = schedule
        .blocks
        .iter()
        .map(|block| ResolvedScheduleBlock {
            activity: block.activity.clone(),
            start: block.start.clone(),
            end: block.end.clone(),
            span: block.span.clone(),
        })
        .collect();

    Ok(ResolvedSchedule {
        name: schedule.name.clone(),
        blocks,
        span: schedule.span.clone(),
    })
}

/// Convert behavior tree AST to resolved type (tree is kept as-is)
pub fn convert_behavior(behavior: &ast::Behavior) -> Result<ResolvedBehavior> {
    Ok(ResolvedBehavior {
        name: behavior.name.clone(),
        root: behavior.root.clone(),
        span: behavior.span.clone(),
    })
}

/// Convert institution AST to resolved type (prose blocks are discarded)
pub fn convert_institution(institution: &ast::Institution) -> Result<ResolvedInstitution> {
    let (fields, _) = extract_fields_and_prose(&institution.fields)?;

    Ok(ResolvedInstitution {
        name: institution.name.clone(),
        fields,
        span: institution.span.clone(),
    })
}

/// Convert relationship AST to resolved type (prose blocks are discarded)
pub fn convert_relationship(relationship: &ast::Relationship) -> Result<ResolvedRelationship> {
    let (fields, _) = extract_fields_and_prose(&relationship.fields)?;

    Ok(ResolvedRelationship {
        name: relationship.name.clone(),
        participants: relationship.participants.clone(),
        fields,
        span:
            relationship.span.clone(),
    })
}

/// Convert location AST to resolved type (prose blocks are discarded)
pub fn convert_location(location: &ast::Location) -> Result<ResolvedLocation> {
    let (fields, _) = extract_fields_and_prose(&location.fields)?;

    Ok(ResolvedLocation {
        name: location.name.clone(),
        fields,
        span: location.span.clone(),
    })
}

/// Convert species AST to resolved type (prose blocks are discarded)
pub fn convert_species(species: &ast::Species) -> Result<ResolvedSpecies> {
    let (fields, _) = extract_fields_and_prose(&species.fields)?;

    Ok(ResolvedSpecies {
        name: species.name.clone(),
        fields,
        span: species.span.clone(),
    })
}

/// Convert enum AST to resolved type
pub fn convert_enum(enum_decl: &ast::EnumDecl) -> Result<ResolvedEnum> {
    Ok(ResolvedEnum {
        name: enum_decl.name.clone(),
        variants: enum_decl.variants.clone(),
        span: enum_decl.span.clone(),
    })
}

/// Extract fields and prose blocks from a field list
///
/// Returns (fields_map, prose_blocks_map). All problems (reserved keywords,
/// duplicate names) are accumulated in an `ErrorCollector` so the user sees
/// every error in one pass; on any error the first occurrence of a duplicate
/// field is what ends up in the maps.
fn extract_fields_and_prose(
    fields: &[ast::Field],
) -> Result<(HashMap<String, Value>, HashMap<String, ProseBlock>)> {
    let mut fields_map = HashMap::new();
    let mut prose_map = HashMap::new();
    let mut collector = ErrorCollector::new();

    // Validate no reserved keywords
    validate::validate_no_reserved_keywords(fields, &mut collector);

    for field in fields {
        // Check for duplicate field names (a name may not appear as both a
        // plain field and a prose block either)
        if fields_map.contains_key(&field.name) || prose_map.contains_key(&field.name) {
            collector.add(ResolveError::ValidationError {
                message: format!("Duplicate field name: '{}'", field.name),
                help: Some(format!(
                    "Each field name must be unique. The field '{}' appears more than once. Remove or rename the duplicate field.",
                    field.name
                )),
            });
            continue; // Continue collecting errors
        }

        match &field.value {
            | Value::ProseBlock(prose) => {
                prose_map.insert(field.name.clone(), prose.clone());
            },
            | value => {
                fields_map.insert(field.name.clone(), value.clone());
            },
        }
    }

    // Return any accumulated errors
    collector.into_result((fields_map, prose_map))
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::syntax::ast::{
        Character,
        EnumDecl,
        Field,
        Span,
    };

    #[test]
    fn test_convert_simple_character() {
        let character = Character {
            name: "Martha".to_string(),
            fields: vec![
                Field {
                    name: "age".to_string(),
                    value: Value::Int(34),
                    span: Span::new(0, 10),
                },
                Field {
                    name: "health".to_string(),
                    value: Value::Float(0.8),
                    span: Span::new(10, 20),
                },
            ],
            template: None,
            span: Span::new(0, 50),
        };

        let resolved = convert_character(&character).unwrap();

        assert_eq!(resolved.name, "Martha");
        assert_eq!(resolved.fields.len(), 2);
        assert_eq!(resolved.fields.get("age"), Some(&Value::Int(34)));
        assert_eq!(resolved.fields.get("health"), Some(&Value::Float(0.8)));
        assert_eq!(resolved.prose_blocks.len(), 0);
    }

    #[test]
    fn test_convert_character_with_prose() {
        let prose_block = ProseBlock {
            tag: "backstory".to_string(),
            content: "Martha grew up in a small town.".to_string(),
            span: Span::new(10, 50),
        };

        let character = Character {
            name: "Martha".to_string(),
            fields: vec![
                Field {
                    name: "age".to_string(),
                    value: Value::Int(34),
                    span: Span::new(0, 10),
                },
                Field {
                    name: "backstory".to_string(),
                    value: Value::ProseBlock(prose_block.clone()),
                    span: Span::new(10, 50),
                },
            ],
            template: None,
            span: Span::new(0, 100),
        };

        let resolved = convert_character(&character).unwrap();

        // Prose blocks are routed to their own map, not the field map.
        assert_eq!(resolved.name, "Martha");
        assert_eq!(resolved.fields.len(), 1);
        assert_eq!(resolved.fields.get("age"), Some(&Value::Int(34)));
        assert_eq!(resolved.prose_blocks.len(), 1);
        assert_eq!(resolved.prose_blocks.get("backstory"), Some(&prose_block));
    }

    #[test]
    fn test_convert_character_duplicate_field_fails() {
        let character = Character {
            name: "Martha".to_string(),
            fields: vec![
                Field {
                    name: "age".to_string(),
                    value: Value::Int(34),
                    span: Span::new(0, 10),
                },
                Field {
                    name: "age".to_string(),
                    value: Value::Int(35),
                    span: Span::new(10, 20),
                },
            ],
            template: None,
            span: Span::new(0, 50),
        };

        let result = convert_character(&character);
        assert!(result.is_err());
    }

    #[test]
    fn test_convert_enum() {
        let enum_decl = EnumDecl {
            name: "Status".to_string(),
            variants: vec!["active".to_string(), "inactive".to_string()],
            span: Span::new(0, 50),
        };

        let resolved = convert_enum(&enum_decl).unwrap();

        assert_eq!(resolved.name, "Status");
        assert_eq!(resolved.variants.len(), 2);
        assert_eq!(resolved.variants[0], "active");
        assert_eq!(resolved.variants[1], "inactive");
    }

    #[test]
    fn test_convert_file_mixed_declarations() {
        let file = ast::File {
            declarations: vec![
                ast::Declaration::Character(Character {
                    name: "Martha".to_string(),
                    fields: vec![Field {
                        name: "age".to_string(),
                        value: Value::Int(34),
                        span: Span::new(0, 10),
                    }],
                    template: None,
                    span: Span::new(0, 50),
                }),
                ast::Declaration::Enum(EnumDecl {
                    name: "Status".to_string(),
                    variants: vec!["active".to_string()],
                    span: Span::new(50, 100),
                }),
            ],
        };

        let resolved = convert_file(&file).unwrap();

        // Declaration order must be preserved in the output.
        assert_eq!(resolved.len(), 2);
        match &resolved[0] {
            | ResolvedDeclaration::Character(c) => assert_eq!(c.name, "Martha"),
            | _ => panic!("Expected Character"),
        }
        match &resolved[1] {
            | ResolvedDeclaration::Enum(e) => assert_eq!(e.name, "Status"),
            | _ => panic!("Expected Enum"),
        }
    }

    #[test]
    fn test_convert_file_skips_use_declarations() {
        let file = ast::File {
            declarations: vec![
                ast::Declaration::Use(ast::UseDecl {
                    path: vec!["foo".to_string()],
                    kind: ast::UseKind::Wildcard,
                    span: Span::new(0, 10),
                }),
                ast::Declaration::Character(Character {
                    name: "Martha".to_string(),
                    fields: vec![],
                    template: None,
                    span: Span::new(10, 50),
                }),
            ],
        };

        let resolved = convert_file(&file).unwrap();

        // Should only have the character, not the use declaration
        assert_eq!(resolved.len(), 1);
        match &resolved[0] {
            | ResolvedDeclaration::Character(c) => assert_eq!(c.name, "Martha"),
            | _ => panic!("Expected Character"),
        }
    }

    #[test]
    fn test_extract_fields_and_prose_empty() {
        let (fields, prose) = extract_fields_and_prose(&[]).unwrap();
        assert_eq!(fields.len(), 0);
        assert_eq!(prose.len(), 0);
    }

    #[test]
    fn test_extract_fields_and_prose_mixed() {
        let prose_block = ProseBlock {
            tag: "description".to_string(),
            content: "Test content".to_string(),
            span: Span::new(10, 30),
        };

        let fields = vec![
            Field {
                name: "age".to_string(),
                value: Value::Int(30),
                span: Span::new(0, 10),
            },
            Field {
                name: "description".to_string(),
                value: Value::ProseBlock(prose_block.clone()),
                span: Span::new(10, 30),
            },
            Field {
                name: "active".to_string(),
                value: Value::Bool(true),
                span: Span::new(30, 40),
            },
        ];

        let (field_map, prose_map) = extract_fields_and_prose(&fields).unwrap();

        assert_eq!(field_map.len(), 2);
        assert_eq!(field_map.get("age"), Some(&Value::Int(30)));
        assert_eq!(field_map.get("active"), Some(&Value::Bool(true)));

        assert_eq!(prose_map.len(), 1);
        assert_eq!(prose_map.get("description"), Some(&prose_block));
    }

    // ===== Template Composition Integration Tests =====

    #[test]
    fn test_convert_character_with_single_template() {
        use crate::resolve::names::NameTable;

        let template = ast::Template {
            name: "Person".to_string(),
            fields: vec![Field {
                name: "type".to_string(), // Changed from "species"
                value: Value::String("human".to_string()),
                span: Span::new(0, 10),
            }],
            strict: false,
            includes: vec![],
            span: Span::new(0, 50),
        };

        let character = Character {
            name: "Martha".to_string(),
            fields: vec![Field {
                name: "age".to_string(),
                value: Value::Int(34),
                span: Span::new(0, 10),
            }],
            template: Some(vec!["Person".to_string()]),
            span: Span::new(0, 100),
        };

        let declarations = vec![
            ast::Declaration::Template(template),
            ast::Declaration::Character(character.clone()),
        ];

        let file = ast::File {
            declarations: declarations.clone(),
        };
        let name_table = NameTable::from_file(&file).unwrap();

        let resolved =
            convert_character_with_templates(&character, &declarations, &name_table).unwrap();

        // Template fields merge underneath the character's own fields.
        assert_eq!(resolved.name, "Martha");
        assert_eq!(resolved.fields.len(), 2);
        assert_eq!(resolved.fields.get("age"), Some(&Value::Int(34)));
        assert_eq!(
            resolved.fields.get("type"),
            Some(&Value::String("human".to_string()))
        );
    }

    #[test]
    fn test_convert_character_with_multiple_templates() {
        use crate::resolve::names::NameTable;

        let physical = ast::Template {
            name: "Physical".to_string(),
            fields: vec![Field {
                name: "height".to_string(),
                value: Value::Int(0),
                span: Span::new(0, 10),
            }],
            strict: false,
            includes: vec![],
            span: Span::new(0, 50),
        };

        let mental = ast::Template {
            name: "Mental".to_string(),
            fields: vec![Field {
                name: "iq".to_string(),
                value: Value::Int(0),
                span: Span::new(0, 10),
            }],
            strict: false,
            includes: vec![],
            span: Span::new(0, 50),
        };

        let character = Character {
            name: "Martha".to_string(),
            fields: vec![
                Field {
                    name: "height".to_string(),
                    value: Value::Int(165),
                    span: Span::new(0, 10),
                },
                Field {
                    name: "iq".to_string(),
                    value: Value::Int(120),
                    span: Span::new(10, 20),
                },
            ],
            template: Some(vec!["Physical".to_string(), "Mental".to_string()]),
            span: Span::new(0, 100),
        };

        let declarations = vec![
            ast::Declaration::Template(physical),
            ast::Declaration::Template(mental),
            ast::Declaration::Character(character.clone()),
        ];

        let file = ast::File {
            declarations: declarations.clone(),
        };
        let name_table = NameTable::from_file(&file).unwrap();

        let resolved =
            convert_character_with_templates(&character, &declarations, &name_table).unwrap();

        // Character values override the template defaults from both parents.
        assert_eq!(resolved.name, "Martha");
        assert_eq!(resolved.fields.len(), 2);
        assert_eq!(resolved.fields.get("height"), Some(&Value::Int(165)));
        assert_eq!(resolved.fields.get("iq"), Some(&Value::Int(120)));
    }

    #[test]
    fn test_convert_template_with_includes() {
        use crate::resolve::names::NameTable;

        let base = ast::Template {
            name: "Human".to_string(),
            fields: vec![Field {
                name: "type".to_string(), // Changed from "species"
                value: Value::String("human".to_string()),
                span: Span::new(0, 10),
            }],
            strict: false,
            includes: vec![],
            span: Span::new(0, 50),
        };

        let derived = ast::Template {
            name: "Person".to_string(),
            fields: vec![Field {
                name: "age".to_string(),
                value: Value::Int(0),
                span: Span::new(0, 10),
            }],
            strict: false,
            includes: vec!["Human".to_string()],
            span: Span::new(0, 50),
        };

        let declarations = vec![
            ast::Declaration::Template(base),
            ast::Declaration::Template(derived.clone()),
        ];

        let file = ast::File {
            declarations: declarations.clone(),
        };
        let name_table = NameTable::from_file(&file).unwrap();

        let resolved =
            convert_template_with_includes(&derived, &declarations, &name_table).unwrap();

        // Included template fields are folded into the derived template.
        assert_eq!(resolved.name, "Person");
        assert_eq!(resolved.fields.len(), 2);
        assert_eq!(resolved.fields.get("age"), Some(&Value::Int(0)));
        assert_eq!(
            resolved.fields.get("type"),
            Some(&Value::String("human".to_string()))
        );
    }

    #[test]
    fn test_convert_character_reserved_keyword_fails() {
        let character = Character {
            name: "Martha".to_string(),
            fields: vec![Field {
                name: "species".to_string(), // Reserved keyword!
+ value: Value::String("human".to_string()), + span: Span::new(0, 10), + }], + template: None, + span: Span::new(0, 50), + }; + + let result = convert_character(&character); + assert!(result.is_err()); + if let Err(ResolveError::ValidationError { message, help }) = result { + assert!(message.contains("reserved keyword")); + assert!(message.contains("species")); + assert!(help.is_some()); + } else { + panic!("Expected ValidationError for reserved keyword"); + } + } + + #[test] + fn test_convert_character_strict_mode_validation() { + use crate::resolve::names::NameTable; + + let template = ast::Template { + name: "Person".to_string(), + fields: vec![Field { + name: "age".to_string(), + value: Value::Range(Box::new(Value::Int(18)), Box::new(Value::Int(65))), + span: Span::new(0, 10), + }], + strict: true, + includes: vec![], + span: Span::new(0, 50), + }; + + let character = Character { + name: "Martha".to_string(), + fields: vec![], // No fields - inherits range from template + template: Some(vec!["Person".to_string()]), + span: Span::new(0, 100), + }; + + let declarations = vec![ + ast::Declaration::Template(template), + ast::Declaration::Character(character.clone()), + ]; + + let file = ast::File { + declarations: declarations.clone(), + }; + let name_table = NameTable::from_file(&file).unwrap(); + + let result = convert_character_with_templates(&character, &declarations, &name_table); + assert!(result.is_err()); + if let Err(ResolveError::ValidationError { message, .. }) = result { + assert!(message.contains("strict template")); + } + } +} diff --git a/src/resolve/convert_integration_tests.rs b/src/resolve/convert_integration_tests.rs new file mode 100644 index 0000000..9a03ac5 --- /dev/null +++ b/src/resolve/convert_integration_tests.rs @@ -0,0 +1,407 @@ +//! 
Integration tests for the full conversion pipeline + +use crate::{ + resolve::convert::convert_file, + syntax::{ + ast::*, + FileParser, + }, + types::*, +}; + +/// Helper to parse and convert a source string +fn parse_and_convert(source: &str) -> Result, Box> { + let lexer = crate::syntax::lexer::Lexer::new(source); + let file = FileParser::new().parse(lexer)?; + Ok(convert_file(&file)?) +} + +#[test] +fn test_simple_character_end_to_end() { + let source = r#" + character Martha { + age: 34 + health: 0.8 + } + "#; + + let resolved = parse_and_convert(source).unwrap(); + assert_eq!(resolved.len(), 1); + + match &resolved[0] { + | ResolvedDeclaration::Character(c) => { + assert_eq!(c.name, "Martha"); + assert_eq!(c.fields.len(), 2); + assert_eq!(c.fields.get("age"), Some(&Value::Int(34))); + assert_eq!(c.fields.get("health"), Some(&Value::Float(0.8))); + }, + | _ => panic!("Expected Character"), + } +} + +#[test] +fn test_character_with_prose_end_to_end() { + let source = r#" + character Martha { + age: 34 + backstory: ---backstory +Martha grew up in a small town. +She loved baking from a young age. 
+--- + } + "#; + + let resolved = parse_and_convert(source).unwrap(); + assert_eq!(resolved.len(), 1); + + match &resolved[0] { + | ResolvedDeclaration::Character(c) => { + assert_eq!(c.name, "Martha"); + assert_eq!(c.fields.len(), 1); + assert_eq!(c.fields.get("age"), Some(&Value::Int(34))); + assert_eq!(c.prose_blocks.len(), 1); + + let backstory = c.prose_blocks.get("backstory").unwrap(); + assert_eq!(backstory.tag, "backstory"); + assert!(backstory.content.contains("Martha grew up")); + }, + | _ => panic!("Expected Character"), + } +} + +#[test] +fn test_multiple_declarations_end_to_end() { + let source = r#" + character Martha { + age: 34 + } + + character David { + age: 36 + } + + enum Status { + active, inactive, pending + } + "#; + + let resolved = parse_and_convert(source).unwrap(); + assert_eq!(resolved.len(), 3); + + let char_count = resolved + .iter() + .filter(|d| matches!(d, ResolvedDeclaration::Character(_))) + .count(); + let enum_count = resolved + .iter() + .filter(|d| matches!(d, ResolvedDeclaration::Enum(_))) + .count(); + + assert_eq!(char_count, 2); + assert_eq!(enum_count, 1); +} + +#[test] +fn test_relationship_end_to_end() { + let source = r#" + relationship Spousal { + Martha + David + bond: 0.9 + } + "#; + + let resolved = parse_and_convert(source).unwrap(); + assert_eq!(resolved.len(), 1); + + match &resolved[0] { + | ResolvedDeclaration::Relationship(r) => { + assert_eq!(r.name, "Spousal"); + assert_eq!(r.participants.len(), 2); + assert_eq!(r.fields.get("bond"), Some(&Value::Float(0.9))); + }, + | _ => panic!("Expected Relationship"), + } +} + +#[test] +fn test_life_arc_end_to_end() { + let source = r#" + life_arc Growth { + state child { + on age > 12 -> teen + } + state teen { + on age > 18 -> adult + } + state adult {} + } + "#; + + let resolved = parse_and_convert(source).unwrap(); + assert_eq!(resolved.len(), 1); + + match &resolved[0] { + | ResolvedDeclaration::LifeArc(la) => { + assert_eq!(la.name, "Growth"); + 
assert_eq!(la.states.len(), 3); + assert_eq!(la.states[0].name, "child"); + assert_eq!(la.states[1].name, "teen"); + assert_eq!(la.states[2].name, "adult"); + }, + | _ => panic!("Expected LifeArc"), + } +} + +#[test] +fn test_behavior_tree_end_to_end() { + let source = r#" + behavior WorkAtBakery { + > { + walk + work(duration: 8h) + rest + } + } + "#; + + let resolved = parse_and_convert(source).unwrap(); + assert_eq!(resolved.len(), 1); + + match &resolved[0] { + | ResolvedDeclaration::Behavior(b) => { + assert_eq!(b.name, "WorkAtBakery"); + // Root should be a Sequence node + assert!(matches!(b.root, BehaviorNode::Sequence(_))); + }, + | _ => panic!("Expected Behavior"), + } +} + +#[test] +fn test_schedule_end_to_end() { + let source = r#" + schedule DailyRoutine { + 08:00 -> 12:00: work + 12:00 -> 13:00: lunch + 13:00 -> 17:00: work + } + "#; + + let resolved = parse_and_convert(source).unwrap(); + assert_eq!(resolved.len(), 1); + + match &resolved[0] { + | ResolvedDeclaration::Schedule(s) => { + assert_eq!(s.name, "DailyRoutine"); + assert_eq!(s.blocks.len(), 3); + assert_eq!(s.blocks[0].activity, "work"); + assert_eq!(s.blocks[1].activity, "lunch"); + assert_eq!(s.blocks[2].activity, "work"); + }, + | _ => panic!("Expected Schedule"), + } +} + +#[test] +fn test_institution_end_to_end() { + let source = r#" + institution Bakery { + employees: 5 + revenue: 50000 + } + "#; + + let resolved = parse_and_convert(source).unwrap(); + assert_eq!(resolved.len(), 1); + + match &resolved[0] { + | ResolvedDeclaration::Institution(i) => { + assert_eq!(i.name, "Bakery"); + assert_eq!(i.fields.get("employees"), Some(&Value::Int(5))); + assert_eq!(i.fields.get("revenue"), Some(&Value::Int(50000))); + }, + | _ => panic!("Expected Institution"), + } +} + +#[test] +fn test_location_end_to_end() { + let source = r#" + location Bakery { + x: 100 + y: 200 + } + "#; + + let resolved = parse_and_convert(source).unwrap(); + assert_eq!(resolved.len(), 1); + + match &resolved[0] { + | 
ResolvedDeclaration::Location(l) => { + assert_eq!(l.name, "Bakery"); + assert_eq!(l.fields.get("x"), Some(&Value::Int(100))); + assert_eq!(l.fields.get("y"), Some(&Value::Int(200))); + }, + | _ => panic!("Expected Location"), + } +} + +#[test] +fn test_species_end_to_end() { + let source = r#" + species Human { + lifespan: 80 + intelligence: 0.9 + } + "#; + + let resolved = parse_and_convert(source).unwrap(); + assert_eq!(resolved.len(), 1); + + match &resolved[0] { + | ResolvedDeclaration::Species(s) => { + assert_eq!(s.name, "Human"); + assert_eq!(s.fields.get("lifespan"), Some(&Value::Int(80))); + assert_eq!(s.fields.get("intelligence"), Some(&Value::Float(0.9))); + }, + | _ => panic!("Expected Species"), + } +} + +#[test] +fn test_template_end_to_end() { + let source = r#" + template Adult { + age: 20..60 + health: 0.5..1.0 + } + "#; + + let resolved = parse_and_convert(source).unwrap(); + assert_eq!(resolved.len(), 1); + + match &resolved[0] { + | ResolvedDeclaration::Template(t) => { + assert_eq!(t.name, "Adult"); + assert_eq!(t.fields.len(), 2); + // Templates keep their range values + assert!(matches!(t.fields.get("age"), Some(Value::Range(_, _)))); + }, + | _ => panic!("Expected Template"), + } +} + +#[test] +fn test_use_declarations_are_skipped() { + let source = r#" + use characters::*; + + character Martha { + age: 34 + } + "#; + + let resolved = parse_and_convert(source).unwrap(); + // Should only have character, not use declaration + assert_eq!(resolved.len(), 1); + assert!(matches!(resolved[0], ResolvedDeclaration::Character(_))); +} + +#[test] +fn test_complex_mixed_file() { + let source = r#" + use relationships::*; + + character Martha { + age: 34 + backstory: ---backstory +Martha grew up in a small town. 
+--- + } + + character David { + age: 36 + } + + relationship Spousal { + Martha + David + bond: 0.9 + } + + enum BondType { + romantic, familial, friendship + } + + schedule DailyRoutine { + 08:00 -> 12:00: work + 12:00 -> 13:00: lunch + } + "#; + + let resolved = parse_and_convert(source).unwrap(); + + // Count each type + let chars = resolved + .iter() + .filter(|d| matches!(d, ResolvedDeclaration::Character(_))) + .count(); + let rels = resolved + .iter() + .filter(|d| matches!(d, ResolvedDeclaration::Relationship(_))) + .count(); + let enums = resolved + .iter() + .filter(|d| matches!(d, ResolvedDeclaration::Enum(_))) + .count(); + let scheds = resolved + .iter() + .filter(|d| matches!(d, ResolvedDeclaration::Schedule(_))) + .count(); + + assert_eq!(chars, 2); + assert_eq!(rels, 1); + assert_eq!(enums, 1); + assert_eq!(scheds, 1); + assert_eq!(resolved.len(), 5); // Total, excluding use declaration +} + +#[test] +fn test_duplicate_field_names_error() { + let source = r#" + character Martha { + age: 34 + age: 35 + } + "#; + + let result = parse_and_convert(source); + assert!(result.is_err(), "Duplicate field names should cause error"); +} + +#[test] +fn test_all_value_types_convert() { + let source = r#" + character Test { + int_val: 42 + float_val: 3.5 + bool_val: true + string_val: "hello" + } + "#; + + let resolved = parse_and_convert(source).unwrap(); + match &resolved[0] { + | ResolvedDeclaration::Character(c) => { + assert_eq!(c.fields.get("int_val"), Some(&Value::Int(42))); + assert_eq!(c.fields.get("float_val"), Some(&Value::Float(3.5))); + assert_eq!(c.fields.get("bool_val"), Some(&Value::Bool(true))); + assert_eq!( + c.fields.get("string_val"), + Some(&Value::String("hello".to_string())) + ); + }, + | _ => panic!("Expected Character"), + } +} diff --git a/src/resolve/convert_prop_tests.rs b/src/resolve/convert_prop_tests.rs new file mode 100644 index 0000000..22ef00c --- /dev/null +++ b/src/resolve/convert_prop_tests.rs @@ -0,0 +1,371 @@ +//! 
Property tests for AST to resolved type conversion + +use proptest::prelude::*; + +use crate::{ + resolve::convert::{ + convert_character, + convert_enum, + convert_file, + }, + syntax::ast::*, +}; + +// ===== Generators ===== + +// Reserved keywords that cannot be used as field names +const RESERVED_KEYWORDS: &[&str] = &[ + "character", + "template", + "life_arc", + "schedule", + "behavior", + "institution", + "relationship", + "location", + "species", + "enum", + "use", + "state", + "on", + "as", + "remove", + "append", + "strict", + "include", + "from", + "self", + "other", + "forall", + "exists", + "in", + "where", + "and", + "or", + "not", + "is", + "true", + "false", +]; + +fn valid_ident() -> impl Strategy { + "[a-zA-Z_][a-zA-Z0-9_]{0,15}" + .prop_filter("not reserved", |s| !RESERVED_KEYWORDS.contains(&s.as_str())) +} + +fn valid_value() -> impl Strategy { + prop_oneof![ + (-1000i64..1000).prop_map(Value::Int), + (-1000.0..1000.0) + .prop_filter("finite", |f: &f64| f.is_finite()) + .prop_map(Value::Float), + any::().prop_map(Value::Bool), + "[a-zA-Z0-9 ]{0,30}".prop_map(Value::String), + ] +} + +fn valid_field() -> impl Strategy { + (valid_ident(), valid_value()).prop_map(|(name, value)| Field { + name, + value, + span: Span::new(0, 10), + }) +} + +fn valid_unique_fields() -> impl Strategy> { + prop::collection::vec(valid_field(), 0..10).prop_map(|fields| { + let mut unique_fields = Vec::new(); + let mut seen_names = std::collections::HashSet::new(); + + for field in fields { + if seen_names.insert(field.name.clone()) { + unique_fields.push(field); + } + } + unique_fields + }) +} + +fn valid_character() -> impl Strategy { + (valid_ident(), valid_unique_fields()).prop_map(|(name, fields)| Character { + name, + fields, + template: None, + + span: Span::new(0, 100), + }) +} + +fn valid_enum() -> impl Strategy { + (valid_ident(), prop::collection::vec(valid_ident(), 1..10)).prop_map(|(name, variants)| { + EnumDecl { + name, + variants, + span: Span::new(0, 100), 
+ } + }) +} + +// ===== Property Tests ===== + +proptest! { + #[test] + fn test_character_name_preserved(character in valid_character()) { + let original_name = character.name.clone(); + let resolved = convert_character(&character).unwrap(); + assert_eq!(resolved.name, original_name); + } + + #[test] + fn test_character_field_count_preserved(character in valid_character()) { + let original_count = character.fields.len(); + let resolved = convert_character(&character).unwrap(); + let total_count = resolved.fields.len() + resolved.prose_blocks.len(); + assert_eq!(total_count, original_count); + } + + #[test] + fn test_character_field_values_preserved(character in valid_character()) { + let resolved = convert_character(&character).unwrap(); + + for field in &character.fields { + match &field.value { + | Value::ProseBlock(_) => { + assert!(resolved.prose_blocks.contains_key(&field.name)); + }, + | value => { + assert_eq!(resolved.fields.get(&field.name), Some(value)); + }, + } + } + } + + #[test] + fn test_enum_name_preserved(enum_decl in valid_enum()) { + let original_name = enum_decl.name.clone(); + let resolved = convert_enum(&enum_decl).unwrap(); + assert_eq!(resolved.name, original_name); + } + + #[test] + fn test_enum_variants_preserved(enum_decl in valid_enum()) { + let resolved = convert_enum(&enum_decl).unwrap(); + assert_eq!(resolved.variants.len(), enum_decl.variants.len()); + for (i, variant) in enum_decl.variants.iter().enumerate() { + assert_eq!(&resolved.variants[i], variant); + } + } + + #[test] + fn test_convert_file_preserves_declaration_count( + characters in prop::collection::vec(valid_character(), 0..5), + enums in prop::collection::vec(valid_enum(), 0..5) + ) { + // Ensure unique names across all declarations to avoid duplicate definition errors + let mut seen_names = std::collections::HashSet::new(); + let mut declarations = Vec::new(); + + for char in characters { + if seen_names.insert(char.name.clone()) { + 
declarations.push(Declaration::Character(char)); + } + } + + for enum_decl in enums { + if seen_names.insert(enum_decl.name.clone()) { + declarations.push(Declaration::Enum(enum_decl)); + } + } + + let file = File { declarations: declarations.clone() }; + let resolved = convert_file(&file).unwrap(); + + // Should have same count (excluding Use declarations) + assert_eq!(resolved.len(), declarations.len()); + } + + #[test] + fn test_duplicate_field_names_rejected( + name in valid_ident(), + field_name in valid_ident(), + val1 in valid_value(), + val2 in valid_value() + ) { + let character = Character { + name, + fields: vec![ + Field { + name: field_name.clone(), + value: val1, + span: Span::new(0, 10), + }, + Field { + name: field_name, + value: val2, + span: Span::new(10, 20), + }, + ], + template: None, + span: Span::new(0, 50), + }; + + let result = convert_character(&character); + assert!(result.is_err(), "Duplicate field names should be rejected"); + } + + #[test] + fn test_field_lookup_is_efficient(character in valid_character()) { + let resolved = convert_character(&character).unwrap(); + + // All fields should be directly accessible in O(1) + for field in &character.fields { + if matches!(field.value, Value::ProseBlock(_)) { + assert!( + resolved.prose_blocks.contains_key(&field.name), + "Prose block {} should be in map", + field.name + ); + } else { + assert!( + resolved.fields.contains_key(&field.name), + "Field {} should be in map", + field.name + ); + } + } + } + + #[test] + fn test_empty_character_converts(name in valid_ident()) { + let character = Character { + name: name.clone(), + fields: vec![], + template: None, + + span: Span::new(0, 10), + }; + + let resolved = convert_character(&character).unwrap(); + assert_eq!(resolved.name, name); + assert_eq!(resolved.fields.len(), 0); + assert_eq!(resolved.prose_blocks.len(), 0); + } + + #[test] + fn test_conversion_is_deterministic(character in valid_character()) { + let resolved1 = 
convert_character(&character).unwrap(); + let resolved2 = convert_character(&character).unwrap(); + + assert_eq!(resolved1.name, resolved2.name); + assert_eq!(resolved1.fields.len(), resolved2.fields.len()); + assert_eq!(resolved1.prose_blocks.len(), resolved2.prose_blocks.len()); + + // All fields should match + for (key, value) in &resolved1.fields { + assert_eq!(resolved2.fields.get(key), Some(value)); + } + } + + #[test] + fn test_file_with_use_declarations_skips_them( + characters in prop::collection::vec(valid_character(), 1..5), + use_count in 0usize..5 + ) { + let mut declarations = vec![]; + + // Add some use declarations + for i in 0..use_count { + declarations.push(Declaration::Use(UseDecl { + path: vec![format!("module{}", i)], + kind: UseKind::Wildcard, + span: Span::new(0, 10), + })); + } + + // Add characters + let char_count = characters.len(); + declarations.extend(characters.into_iter().map(Declaration::Character)); + + let file = File { declarations }; + let resolved = convert_file(&file).unwrap(); + + // Should only have characters, not use declarations + assert_eq!(resolved.len(), char_count); + } +} + +#[cfg(test)] +mod edge_cases { + use super::*; + + proptest! 
{ + #[test] + fn test_all_value_types_convert( + int_val in -1000i64..1000, + float_val in -1000.0..1000.0, + bool_val in any::(), + string_val in "[a-zA-Z0-9 ]{1,30}" + ) { + let character = Character { + name: "Test".to_string(), + fields: vec![ + Field { + name: "int_field".to_string(), + value: Value::Int(int_val), + span: Span::new(0, 10), + }, + Field { + name: "float_field".to_string(), + value: Value::Float(float_val), + span: Span::new(10, 20), + }, + Field { + name: "bool_field".to_string(), + value: Value::Bool(bool_val), + span: Span::new(20, 30), + }, + Field { + name: "string_field".to_string(), + value: Value::String(string_val.clone()), + span: Span::new(30, 40), + }, + ], + template: None, + span: Span::new(0, 50), + }; + + let resolved = convert_character(&character).unwrap(); + assert_eq!(resolved.fields.get("int_field"), Some(&Value::Int(int_val))); + assert_eq!(resolved.fields.get("float_field"), Some(&Value::Float(float_val))); + assert_eq!(resolved.fields.get("bool_field"), Some(&Value::Bool(bool_val))); + assert_eq!(resolved.fields.get("string_field"), Some(&Value::String(string_val))); + } + + #[test] + fn test_unicode_in_names_and_values( + name in "[a-zA-Z_\u{0080}-\u{00FF}]{1,20}", + field_name in "[a-zA-Z_\u{0080}-\u{00FF}]{1,20}".prop_filter("not reserved", |s| { + !RESERVED_KEYWORDS.contains(&s.as_str()) + }), + string_val in "[a-zA-Z0-9 \u{0080}-\u{00FF}]{0,30}" + ) { + let character = Character { + name: name.clone(), + fields: vec![Field { + name: field_name.clone(), + value: Value::String(string_val.clone()), + span: Span::new(0, 10), + }], + template: None, + span: Span::new(0, 50), + }; + + let resolved = convert_character(&character).unwrap(); + assert_eq!(resolved.name, name); + assert_eq!( + resolved.fields.get(&field_name), + Some(&Value::String(string_val)) + ); + } + } +} diff --git a/src/resolve/integration_tests.rs b/src/resolve/integration_tests.rs new file mode 100644 index 0000000..9decf0c --- /dev/null +++ 
b/src/resolve/integration_tests.rs @@ -0,0 +1,164 @@ +//! Integration tests for the resolution engine + +use crate::{ + resolve::names::{ + DeclKind, + NameTable, + }, + syntax::{ + lexer::Lexer, + FileParser, + }, +}; + +fn parse(source: &str) -> crate::syntax::ast::File { + let lexer = Lexer::new(source); + let parser = FileParser::new(); + parser.parse(lexer).expect("Should parse successfully") +} + +#[test] +fn test_name_resolution_example_file() { + let source = r#" + character Alice { + age: 30 + } + + character Bob { + age: 35 + } + + template PersonTemplate { + age: 18..80 + } + + enum Status { + active, + inactive + } + "#; + + let file = parse(source); + let table = NameTable::from_file(&file).expect("Should build name table"); + + // Verify all names are registered + assert!(table.lookup(&["Alice".to_string()]).is_some()); + assert!(table.lookup(&["Bob".to_string()]).is_some()); + assert!(table.lookup(&["PersonTemplate".to_string()]).is_some()); + assert!(table.lookup(&["Status".to_string()]).is_some()); + + // Verify kind filtering + assert_eq!(table.entries_of_kind(DeclKind::Character).count(), 2); + assert_eq!(table.entries_of_kind(DeclKind::Template).count(), 1); + assert_eq!(table.entries_of_kind(DeclKind::Enum).count(), 1); +} + +#[test] +fn test_use_statements_are_parsed() { + let source = r#" + use characters::Martha; + use templates::{Person, NPC}; + use locations::*; + + character LocalChar { + age: 25 + } + "#; + + let file = parse(source); + let table = NameTable::from_file(&file).expect("Should build name table"); + + // Verify imports were collected + assert_eq!(table.imports().len(), 3); + + // Verify local declaration is registered + assert!(table.lookup(&["LocalChar".to_string()]).is_some()); +} + +#[test] +fn test_duplicate_name_error() { + let source = r#" + character Martha { + age: 30 + } + + character Martha { + age: 35 + } + "#; + + let file = parse(source); + let result = NameTable::from_file(&file); + + // Should fail with 
duplicate error + assert!(result.is_err()); +} + +#[test] +fn test_fuzzy_matching_suggestion() { + let source = r#" + character Elizabeth { + age: 30 + } + "#; + + let file = parse(source); + let table = NameTable::from_file(&file).expect("Should build name table"); + + // Typo "Elizabet" should suggest "Elizabeth" + let suggestion = table.find_suggestion("Elizabet"); + assert_eq!(suggestion, Some("Elizabeth".to_string())); + + // Typo "Elizabth" should also suggest "Elizabeth" + let suggestion = table.find_suggestion("Elizabth"); + assert_eq!(suggestion, Some("Elizabeth".to_string())); +} + +#[test] +fn test_all_declaration_kinds() { + let source = r#" + character C { age: 1 } + template T { age: 1..2 } + life_arc L { + state s {} + } + schedule S { + 10:00 -> 11:00: activity + } + behavior B { + action + } + institution I { + name: "Test" + } + relationship R { + C + C + } + location Loc { + name: "Place" + } + species Sp { + lifespan: 100 + } + enum E { + a, + b + } + "#; + + let file = parse(source); + let table = NameTable::from_file(&file).expect("Should build name table"); + + // All 10 declaration kinds should be represented + assert_eq!(table.entries_of_kind(DeclKind::Character).count(), 1); + assert_eq!(table.entries_of_kind(DeclKind::Template).count(), 1); + assert_eq!(table.entries_of_kind(DeclKind::LifeArc).count(), 1); + assert_eq!(table.entries_of_kind(DeclKind::Schedule).count(), 1); + assert_eq!(table.entries_of_kind(DeclKind::Behavior).count(), 1); + assert_eq!(table.entries_of_kind(DeclKind::Institution).count(), 1); + assert_eq!(table.entries_of_kind(DeclKind::Relationship).count(), 1); + assert_eq!(table.entries_of_kind(DeclKind::Location).count(), 1); + assert_eq!(table.entries_of_kind(DeclKind::Species).count(), 1); + assert_eq!(table.entries_of_kind(DeclKind::Enum).count(), 1); +} diff --git a/src/resolve/links.rs b/src/resolve/links.rs new file mode 100644 index 0000000..083af8c --- /dev/null +++ b/src/resolve/links.rs @@ -0,0 +1,325 @@ 
+//! Bidirectional relationship resolution +//! +//! Handles relationships that can be declared from either participant's +//! perspective, merging self/other blocks and validating consistency. + +use std::collections::HashMap; + +use crate::{ + resolve::{ + ResolveError, + Result, + }, + syntax::ast::{ + Declaration, + Field, + File, + Participant, + Relationship, + }, +}; + +/// A relationship key that's order-independent +/// (Martha, David) and (David, Martha) map to the same key +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +struct RelationshipKey { + participants: Vec, + name: String, +} + +impl RelationshipKey { + fn new(mut participants: Vec, name: String) -> Self { + // Sort participants to make key order-independent + participants.sort(); + Self { participants, name } + } +} + +/// Information about a relationship declaration +#[derive(Debug, Clone)] +struct RelationshipDecl { + relationship: Relationship, + /// Which participant is "self" (index into participants) + self_index: Option, +} + +/// Resolved bidirectional relationship +#[derive(Debug, Clone)] +pub struct ResolvedRelationship { + pub name: String, + pub participants: Vec, + pub fields: Vec, + /// Merged self/other blocks for each participant + pub participant_fields: Vec, +} + +#[derive(Debug, Clone)] +pub struct ParticipantFields { + pub participant_name: Vec, + pub role: Option, + /// Fields from this participant's "self" block + pub self_fields: Vec, + /// Fields from this participant's "other" block (about other participants) + pub other_fields: Vec, +} + +/// Resolve bidirectional relationships in a file +pub fn resolve_relationships(file: &File) -> Result> { + // Group relationships by key + let mut relationship_groups: HashMap> = HashMap::new(); + + for decl in &file.declarations { + if let Declaration::Relationship(rel) = decl { + // Extract participant names + let participant_names: Vec = + rel.participants.iter().map(|p| p.name.join("::")).collect(); + + let key = 
RelationshipKey::new(participant_names, rel.name.clone()); + + // Determine which participant is "self" based on self/other blocks + let self_index = rel + .participants + .iter() + .position(|p| p.self_block.is_some() || p.other_block.is_some()); + + relationship_groups + .entry(key) + .or_default() + .push(RelationshipDecl { + relationship: rel.clone(), + self_index, + }); + } + } + + // Merge grouped relationships + let mut resolved = Vec::new(); + for (key, decls) in relationship_groups { + let merged = merge_relationship_declarations(&key, decls)?; + resolved.push(merged); + } + + Ok(resolved) +} + +/// Merge multiple declarations of the same relationship +fn merge_relationship_declarations( + key: &RelationshipKey, + decls: Vec, +) -> Result { + if decls.is_empty() { + return Err(ResolveError::ValidationError { + message: "Empty relationship group".to_string(), + help: Some("This is an internal error - relationship groups should never be empty. Please report this as a bug.".to_string()), + }); + } + + // Start with the first declaration + let base = &decls[0].relationship; + let mut participant_fields: Vec = base + .participants + .iter() + .map(|p| ParticipantFields { + participant_name: p.name.clone(), + role: p.role.clone(), + self_fields: p.self_block.clone().unwrap_or_default(), + other_fields: p.other_block.clone().unwrap_or_default(), + }) + .collect(); + + // Merge additional declarations + for decl in decls.iter().skip(1) { + // If this declaration specifies a different participant as "self", + // merge their self/other blocks appropriately + if let Some(self_idx) = decl.self_index { + let participant_name = &decl.relationship.participants[self_idx].name; + + // Find this participant in our merged list + if let Some(idx) = participant_fields + .iter() + .position(|pf| &pf.participant_name == participant_name) + { + // Merge self blocks + let self_block = decl.relationship.participants[self_idx] + .self_block + .clone() + .unwrap_or_default(); + 
merge_fields(&mut participant_fields[idx].self_fields, self_block)?; + + // Merge other blocks + let other_block = decl.relationship.participants[self_idx] + .other_block + .clone() + .unwrap_or_default(); + merge_fields(&mut participant_fields[idx].other_fields, other_block)?; + } + } + } + + // Merge shared fields (fields outside self/other blocks) + let mut merged_fields = base.fields.clone(); + for decl in decls.iter().skip(1) { + merge_fields(&mut merged_fields, decl.relationship.fields.clone())?; + } + + Ok(ResolvedRelationship { + name: key.name.clone(), + participants: base.participants.clone(), + fields: merged_fields, + participant_fields, + }) +} + +/// Merge field lists, detecting conflicts +fn merge_fields(target: &mut Vec, source: Vec) -> Result<()> { + for new_field in source { + // Check if field already exists + if let Some(existing) = target.iter().find(|f| f.name == new_field.name) { + // Fields must have the same value + if existing.value != new_field.value { + return Err(ResolveError::ValidationError { + message: format!( + "Conflicting values for field '{}' in relationship", + new_field.name + ), + help: Some(format!( + "The field '{}' has different values in different declarations of the same relationship. 
Make sure all declarations of this relationship use the same value for shared fields.", + new_field.name + )), + }); + } + // Same value, no need to add again + } else { + // New field, add it + target.push(new_field); + } + } + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::syntax::ast::{ + Span, + Value, + }; + + fn make_participant(name: &str, role: Option<&str>) -> Participant { + Participant { + name: vec![name.to_string()], + role: role.map(|s| s.to_string()), + self_block: None, + other_block: None, + span: Span::new(0, 10), + } + } + + fn make_field(name: &str, value: i64) -> Field { + Field { + name: name.to_string(), + value: Value::Int(value), + span: Span::new(0, 10), + } + } + + #[test] + fn test_relationship_key_order_independent() { + let key1 = RelationshipKey::new( + vec!["Martha".to_string(), "David".to_string()], + "Marriage".to_string(), + ); + let key2 = RelationshipKey::new( + vec!["David".to_string(), "Martha".to_string()], + "Marriage".to_string(), + ); + assert_eq!(key1, key2); + } + + #[test] + fn test_single_relationship_declaration() { + let file = File { + declarations: vec![Declaration::Relationship(Relationship { + name: "Friendship".to_string(), + participants: vec![ + make_participant("Alice", None), + make_participant("Bob", None), + ], + fields: vec![make_field("bond", 80)], + span: Span::new(0, 10), + })], + }; + + let resolved = resolve_relationships(&file).unwrap(); + assert_eq!(resolved.len(), 1); + assert_eq!(resolved[0].name, "Friendship"); + assert_eq!(resolved[0].participants.len(), 2); + } + + #[test] + fn test_bidirectional_relationship_merge() { + let mut martha_participant = make_participant("Martha", Some("spouse")); + martha_participant.self_block = Some(vec![make_field("bond", 90)]); + martha_participant.other_block = Some(vec![make_field("trust", 85)]); + + let mut david_participant = make_participant("David", Some("spouse")); + david_participant.self_block = Some(vec![make_field("bond", 90)]); 
+ david_participant.other_block = Some(vec![make_field("trust", 85)]); + + let file = File { + declarations: vec![ + Declaration::Relationship(Relationship { + name: "Marriage".to_string(), + participants: vec![ + martha_participant.clone(), + make_participant("David", Some("spouse")), + ], + fields: vec![], + span: Span::new(0, 10), + }), + Declaration::Relationship(Relationship { + name: "Marriage".to_string(), + participants: vec![ + david_participant.clone(), + make_participant("Martha", Some("spouse")), + ], + fields: vec![], + span: Span::new(20, 30), + }), + ], + }; + + let resolved = resolve_relationships(&file).unwrap(); + assert_eq!(resolved.len(), 1); + assert_eq!(resolved[0].name, "Marriage"); + } + + #[test] + fn test_conflicting_field_values() { + let mut p1 = make_participant("Alice", None); + p1.self_block = Some(vec![make_field("bond", 80)]); + + let mut p2 = make_participant("Alice", None); + p2.self_block = Some(vec![make_field("bond", 90)]); // Different value + + let file = File { + declarations: vec![ + Declaration::Relationship(Relationship { + name: "Test".to_string(), + participants: vec![p1, make_participant("Bob", None)], + fields: vec![], + span: Span::new(0, 10), + }), + Declaration::Relationship(Relationship { + name: "Test".to_string(), + participants: vec![p2, make_participant("Bob", None)], + fields: vec![], + span: Span::new(20, 30), + }), + ], + }; + + let result = resolve_relationships(&file); + assert!(result.is_err()); + } +} diff --git a/src/resolve/links_prop_tests.rs b/src/resolve/links_prop_tests.rs new file mode 100644 index 0000000..7fc8868 --- /dev/null +++ b/src/resolve/links_prop_tests.rs @@ -0,0 +1,486 @@ +//! 
Property tests for bidirectional relationship resolution + +use proptest::prelude::*; + +use crate::{ + resolve::links::resolve_relationships, + syntax::ast::*, +}; + +// ===== Generators ===== + +fn valid_ident() -> impl Strategy { + "[a-zA-Z_][a-zA-Z0-9_]{0,15}".prop_filter("not a keyword", |s| { + !matches!( + s.as_str(), + "use" | + "character" | + "template" | + "life_arc" | + "schedule" | + "behavior" | + "institution" | + "relationship" | + "location" | + "species" | + "enum" | + "state" | + "on" | + "as" | + "self" | + "other" | + "remove" | + "append" | + "forall" | + "exists" | + "in" | + "where" | + "and" | + "or" | + "not" | + "is" | + "true" | + "false" + ) + }) +} + +fn valid_field() -> impl Strategy { + (valid_ident(), 0i64..100).prop_map(|(name, value)| Field { + name, + value: Value::Int(value), + span: Span::new(0, 10), + }) +} + +fn valid_field_list() -> impl Strategy> { + prop::collection::vec(valid_field(), 0..5) + // Ensure unique field names + .prop_map(|fields| { + let mut unique_fields = Vec::new(); + let mut seen_names = std::collections::HashSet::new(); + + for field in fields { + if seen_names.insert(field.name.clone()) { + unique_fields.push(field); + } + } + unique_fields + }) +} + +fn valid_participant(name: String) -> impl Strategy { + prop::option::of(valid_ident()).prop_map(move |role| Participant { + name: vec![name.clone()], + role, + self_block: None, + other_block: None, + span: Span::new(0, 10), + }) +} + +#[allow(dead_code)] +fn valid_participant_with_blocks(name: String) -> impl Strategy { + ( + prop::option::of(valid_ident()), + prop::option::of(valid_field_list()), + prop::option::of(valid_field_list()), + ) + .prop_map(move |(role, self_block, other_block)| Participant { + name: vec![name.clone()], + role, + self_block, + other_block, + span: Span::new(0, 10), + }) +} + +fn valid_relationship() -> impl Strategy { + ( + valid_ident(), + valid_ident(), + valid_ident(), + valid_field_list(), + ) + .prop_flat_map(|(rel_name, 
person1, person2, fields)| { + ( + Just(rel_name), + valid_participant(person1.clone()), + valid_participant(person2.clone()), + Just(fields), + ) + }) + .prop_map(|(name, p1, p2, fields)| Relationship { + name, + participants: vec![p1, p2], + fields, + span: Span::new(0, 10), + }) +} + +fn valid_bidirectional_relationship() -> impl Strategy { + ( + valid_ident(), + valid_ident(), + valid_ident(), + valid_field_list(), + valid_field_list(), + ) + .prop_flat_map(|(rel_name, person1, person2, shared_fields, self_fields)| { + let self_fields_clone = self_fields.clone(); + ( + Just(rel_name.clone()), + Just(person1.clone()), + Just(person2.clone()), + Just(shared_fields.clone()), + Just(self_fields), + Just(self_fields_clone), + ) + }) + .prop_map(|(name, p1_name, p2_name, shared, p1_self, p2_self)| { + // First declaration from p1's perspective + let p1 = Participant { + name: vec![p1_name.clone()], + role: None, + self_block: Some(p1_self), + other_block: None, + span: Span::new(0, 10), + }; + let p2_in_p1_rel = Participant { + name: vec![p2_name.clone()], + role: None, + self_block: None, + other_block: None, + span: Span::new(0, 10), + }; + + let rel1 = Relationship { + name: name.clone(), + participants: vec![p1, p2_in_p1_rel], + fields: shared.clone(), + span: Span::new(0, 10), + }; + + // Second declaration from p2's perspective + let p2 = Participant { + name: vec![p2_name], + role: None, + self_block: Some(p2_self), + other_block: None, + span: Span::new(20, 30), + }; + let p1_in_p2_rel = Participant { + name: vec![p1_name], + role: None, + self_block: None, + other_block: None, + span: Span::new(20, 30), + }; + + let rel2 = Relationship { + name, + participants: vec![p2, p1_in_p2_rel], + fields: shared, + span: Span::new(20, 30), + }; + + (rel1, rel2) + }) +} + +// ===== Property Tests ===== + +proptest! 
{ + #[test] + fn test_single_relationship_always_resolves(rel in valid_relationship()) { + let file = File { + declarations: vec![Declaration::Relationship(rel)], + }; + + let result = resolve_relationships(&file); + assert!(result.is_ok(), "Single relationship should always resolve"); + + let resolved = result.unwrap(); + assert_eq!(resolved.len(), 1); + } + + #[test] + fn test_relationship_participant_count_preserved(rel in valid_relationship()) { + let file = File { + declarations: vec![Declaration::Relationship(rel.clone())], + }; + + let resolved = resolve_relationships(&file).unwrap(); + assert_eq!(resolved[0].participants.len(), rel.participants.len()); + } + + #[test] + fn test_relationship_fields_preserved(rel in valid_relationship()) { + let file = File { + declarations: vec![Declaration::Relationship(rel.clone())], + }; + + let resolved = resolve_relationships(&file).unwrap(); + assert_eq!(resolved[0].fields.len(), rel.fields.len()); + } + + #[test] + fn test_bidirectional_relationships_merge( + (rel1, rel2) in valid_bidirectional_relationship() + ) { + let file = File { + declarations: vec![ + Declaration::Relationship(rel1), + Declaration::Relationship(rel2), + ], + }; + + let result = resolve_relationships(&file); + assert!(result.is_ok(), "Bidirectional relationships should merge successfully"); + + let resolved = result.unwrap(); + // Should merge into single relationship + assert_eq!(resolved.len(), 1); + } + + #[test] + fn test_participant_order_doesnt_matter( + name in valid_ident(), + p1 in valid_ident(), + p2 in valid_ident(), + fields in valid_field_list() + ) { + // Create two identical relationships with participants in different order + let rel1 = Relationship { + name: name.clone(), + participants: vec![ + Participant { + name: vec![p1.clone()], + role: None, + self_block: None, + other_block: None, + span: Span::new(0, 10), + }, + Participant { + name: vec![p2.clone()], + role: None, + self_block: None, + other_block: None, + span: 
Span::new(0, 10), + }, + ], + fields: fields.clone(), + span: Span::new(0, 10), + }; + + let rel2 = Relationship { + name: name.clone(), + participants: vec![ + Participant { + name: vec![p2.clone()], + role: None, + self_block: None, + other_block: None, + span: Span::new(20, 30), + }, + Participant { + name: vec![p1.clone()], + role: None, + self_block: None, + other_block: None, + span: Span::new(20, 30), + }, + ], + fields, + span: Span::new(20, 30), + }; + + let file = File { + declarations: vec![ + Declaration::Relationship(rel1), + Declaration::Relationship(rel2), + ], + }; + + let result = resolve_relationships(&file); + assert!(result.is_ok()); + + let resolved = result.unwrap(); + // Should recognize as same relationship despite order + assert_eq!(resolved.len(), 1); + } + + #[test] + fn test_different_relationships_stay_separate( + name1 in valid_ident(), + name2 in valid_ident(), + p1 in valid_ident(), + p2 in valid_ident() + ) { + // Skip if names are the same + if name1 == name2 { + return Ok(()); + } + let rel1 = Relationship { + name: name1, + participants: vec![ + Participant { + name: vec![p1.clone()], + role: None, + self_block: None, + other_block: None, + span: Span::new(0, 10), + }, + Participant { + name: vec![p2.clone()], + role: None, + self_block: None, + other_block: None, + span: Span::new(0, 10), + }, + ], + fields: vec![], + span: Span::new(0, 10), + }; + + let rel2 = Relationship { + name: name2, + participants: vec![ + Participant { + name: vec![p1], + role: None, + self_block: None, + other_block: None, + span: Span::new(20, 30), + }, + Participant { + name: vec![p2], + role: None, + self_block: None, + other_block: None, + span: Span::new(20, 30), + }, + ], + fields: vec![], + span: Span::new(20, 30), + }; + + let file = File { + declarations: vec![ + Declaration::Relationship(rel1), + Declaration::Relationship(rel2), + ], + }; + + let result = resolve_relationships(&file); + assert!(result.is_ok()); + + let resolved = 
result.unwrap(); + // Different relationship names should stay separate + assert_eq!(resolved.len(), 2); + } + + #[test] + fn test_self_blocks_are_merged( + name in valid_ident(), + p1 in valid_ident(), + p2 in valid_ident(), + fields1 in valid_field_list(), + fields2 in valid_field_list() + ) { + let participant1 = Participant { + name: vec![p1.clone()], + role: None, + self_block: Some(fields1), + other_block: None, + span: Span::new(0, 10), + }; + + let participant1_again = Participant { + name: vec![p1.clone()], + role: None, + self_block: Some(fields2), + other_block: None, + span: Span::new(20, 30), + }; + + let rel1 = Relationship { + name: name.clone(), + participants: vec![ + participant1, + Participant { + name: vec![p2.clone()], + role: None, + self_block: None, + other_block: None, + span: Span::new(0, 10), + }, + ], + fields: vec![], + span: Span::new(0, 10), + }; + + let rel2 = Relationship { + name: name.clone(), + participants: vec![ + participant1_again, + Participant { + name: vec![p2], + role: None, + self_block: None, + other_block: None, + span: Span::new(20, 30), + }, + ], + fields: vec![], + span: Span::new(20, 30), + }; + + let file = File { + declarations: vec![ + Declaration::Relationship(rel1), + Declaration::Relationship(rel2), + ], + }; + + let result = resolve_relationships(&file); + // Should succeed unless there are conflicting field values + // (which is tested separately) + if result.is_ok() { + let resolved = result.unwrap(); + assert_eq!(resolved.len(), 1); + } + } + + #[test] + fn test_empty_file_gives_empty_result( + decls in prop::collection::vec( + prop_oneof![ + valid_ident().prop_map(|name| Declaration::Character(Character { + name, + fields: vec![], + template: None, + + span: Span::new(0, 10), + })), + valid_ident().prop_map(|name| Declaration::Template(Template { + name, + fields: vec![], + strict: false, + includes: vec![], + span: Span::new(0, 10), + })), + ], + 0..5 + ) + ) { + // File with no relationships + let file 
= File { declarations: decls }; + + let result = resolve_relationships(&file); + assert!(result.is_ok()); + + let resolved = result.unwrap(); + assert_eq!(resolved.len(), 0); + } +} diff --git a/src/resolve/merge.rs b/src/resolve/merge.rs new file mode 100644 index 0000000..5e7682c --- /dev/null +++ b/src/resolve/merge.rs @@ -0,0 +1,762 @@ +//! Template composition and merge engine +//! +//! Handles two types of template composition: +//! 1. Template includes (vertical composition): `template Person { include +//! Human ... }` +//! 2. Character template inheritance (horizontal composition): `character +//! Martha from Person, Worker { ... }` +//! +//! Also handles legacy @BaseTemplate { ... } syntax for template overrides +//! with: +//! - Set operations (field: value) - replace or add field +//! - Remove operations (remove field) - delete field +//! - Append operations (append field: value) - add new field (error if exists) + +use std::collections::HashSet; + +use crate::{ + resolve::{ + names::NameTable, + ResolveError, + Result, + }, + syntax::ast::{ + Character, + Declaration, + Field, + OverrideOp, + Template, + Value, + }, +}; + +// ===== Template Composition ===== + +/// Resolve a template by recursively merging all its includes +/// +/// Algorithm: +/// 1. Recursively resolve all included templates (depth-first) +/// 2. Merge included template fields (later includes override earlier ones) +/// 3. 
Add the template's own fields on top +/// +/// Returns the fully merged fields for this template +pub fn resolve_template_includes( + template: &Template, + declarations: &[Declaration], + name_table: &NameTable, + visited: &mut HashSet, +) -> Result> { + // Detect circular includes + if !visited.insert(template.name.clone()) { + return Err(ResolveError::CircularDependency { + cycle: format!( + "Circular template include detected: {} -> {}", + visited.iter().cloned().collect::>().join(" -> "), + template.name + ), + }); + } + + let mut merged_fields = Vec::new(); + + // Resolve all includes first + for include_name in &template.includes { + // Look up the included template + let entry = name_table + .lookup(std::slice::from_ref(include_name)) + .ok_or_else(|| ResolveError::NameNotFound { + name: include_name.clone(), + suggestion: name_table.find_suggestion(include_name), + })?; + + // Get the template declaration + let included_template = match &declarations[entry.decl_index] { + | Declaration::Template(t) => t, + | _ => { + return Err(ResolveError::ValidationError { + message: format!( + "Cannot include '{}': it's not a template", + include_name + ), + help: Some(format!( + "The 'include' keyword can only reference templates. '{}' is a different type of declaration. 
Make sure you're including the correct name and that it refers to a template.", + include_name + )), + }); + }, + }; + + // Recursively resolve the included template + let included_fields = + resolve_template_includes(included_template, declarations, name_table, visited)?; + + // Merge included fields (replacing any existing fields with same name) + merged_fields = merge_field_lists(merged_fields, included_fields); + } + + // Add this template's own fields on top + merged_fields = merge_field_lists(merged_fields, template.fields.clone()); + + // Remove this template from visited set (allow it to be used in other branches) + visited.remove(&template.name); + + Ok(merged_fields) +} + +/// Merge character templates into character fields +/// +/// Algorithm: +/// 1. Resolve each template (which may itself include other templates) +/// 2. Merge templates left to right (later templates override earlier ones) +/// 3. Add character's own fields on top +/// 4. If any template is strict, validate that all its fields are concrete +/// +/// Returns the fully merged fields for this character +pub fn merge_character_templates( + character: &Character, + declarations: &[Declaration], + name_table: &NameTable, +) -> Result> { + let mut merged_fields = Vec::new(); + let mut strict_templates = Vec::new(); + + // If character has templates, merge them + if let Some(template_names) = &character.template { + for template_name in template_names { + // Look up the template + let entry = name_table + .lookup(std::slice::from_ref(template_name)) + .ok_or_else(|| ResolveError::NameNotFound { + name: template_name.clone(), + suggestion: name_table.find_suggestion(template_name), + })?; + + // Get the template declaration + let template = match &declarations[entry.decl_index] { + | Declaration::Template(t) => t, + | _ => { + return Err(ResolveError::ValidationError { + message: format!( + "Character '{}' cannot inherit from '{}': it's not a template", + character.name, template_name + ), + 
help: Some(format!( + "The 'from' keyword can only reference templates. '{}' is a different type of declaration. Make sure you're inheriting from the correct name and that it refers to a template.", + template_name + )), + }); + }, + }; + + // Track strict templates for validation + if template.strict { + strict_templates.push(template_name.clone()); + } + + // Resolve template (which handles includes recursively) + let mut visited = HashSet::new(); + let template_fields = + resolve_template_includes(template, declarations, name_table, &mut visited)?; + + // Merge template fields into accumulated fields + merged_fields = merge_field_lists(merged_fields, template_fields); + } + } + + // Add character's own fields on top + merged_fields = merge_field_lists(merged_fields, character.fields.clone()); + + // Validate strict mode: all strict template fields must have concrete values + if !strict_templates.is_empty() { + validate_strict_templates(&character.name, &merged_fields, &strict_templates)?; + } + + Ok(merged_fields) +} + +/// Merge two field lists, with fields from the second list overriding the first +fn merge_field_lists(base: Vec, override_fields: Vec) -> Vec { + let mut merged = base; + + for field in override_fields { + // If field exists, replace it; otherwise add it + if let Some(existing) = merged.iter_mut().find(|f| f.name == field.name) { + existing.value = field.value.clone(); + existing.span = field.span.clone(); + } else { + merged.push(field); + } + } + + merged +} + +/// Validate that strict template requirements are met +/// +/// For strict templates, all fields must have concrete values (not ranges) +fn validate_strict_templates( + character_name: &str, + fields: &[Field], + strict_templates: &[String], +) -> Result<()> { + for field in fields { + if let Value::Range(_, _) = &field.value { + return Err(ResolveError::ValidationError { + message: format!( + "Character '{}' inherits from strict template(s) {}, but field '{}' has a range value instead 
of a concrete value", + character_name, + strict_templates.join(", "), + field.name + ), + help: Some(format!( + "Strict templates require all fields to have concrete values. Replace the range in '{}' with a specific value. For example, instead of '18..65', use a specific age like '34'.", + field.name + )), + }); + } + } + + Ok(()) +} + +// ===== Legacy Override System ===== + +/// Apply an override to a base template's fields +/// +/// This performs a structural merge: +/// 1. Start with all fields from base +/// 2. Apply each override operation in order +/// 3. Return merged field list +pub fn apply_override(base_fields: Vec, override_ops: &[OverrideOp]) -> Result> { + let mut merged = base_fields; + + for op in override_ops { + match op { + | OverrideOp::Set(field) => { + // Replace existing field or add new one + if let Some(existing) = merged.iter_mut().find(|f| f.name == field.name) { + existing.value = field.value.clone(); + existing.span = field.span.clone(); + } else { + merged.push(field.clone()); + } + }, + + | OverrideOp::Remove(name) => { + // Remove field if it exists + merged.retain(|f| f.name != *name); + }, + + | OverrideOp::Append(field) => { + // Add field only if it doesn't exist + if merged.iter().any(|f| f.name == field.name) { + return Err(ResolveError::ValidationError { + message: format!( + "Cannot append field '{}': field already exists", + field.name + ), + help: Some(format!( + "The 'append' operation is used to add new fields that don't exist in the base template. The field '{}' already exists. 
Use 'set' instead to update an existing field, or use a different field name.", + field.name + )), + }); + } + merged.push(field.clone()); + }, + } + } + + Ok(merged) +} + +/// Recursively resolve overrides in a value +/// +/// If the value contains an Override, look up the base template +/// and apply the override operations +pub fn resolve_value_overrides(value: &Value, name_table: &NameTable) -> Result { + match value { + | Value::Override(override_spec) => { + // Look up the base template + let _base_entry = name_table.lookup(&override_spec.base).ok_or_else(|| { + ResolveError::NameNotFound { + name: override_spec.base.join("::"), + suggestion: name_table + .find_suggestion(override_spec.base.last().unwrap_or(&String::new())), + } + })?; + + // For now, we'll return an error since we need the actual template fields + // In a full implementation, we'd extract the fields from the base declaration + Err(ResolveError::ValidationError { + message: format!( + "Override resolution not yet fully implemented for base '{}'", + override_spec.base.join("::") + ), + help: Some("Template overrides are not yet supported. This feature is planned for a future release. 
For now, define characters directly without using template inheritance.".to_string()), + }) + }, + + | Value::List(items) => { + // Recursively resolve overrides in list items + let resolved: Result> = items + .iter() + .map(|v| resolve_value_overrides(v, name_table)) + .collect(); + Ok(Value::List(resolved?)) + }, + + | Value::Object(fields) => { + // Recursively resolve overrides in object fields + let resolved_fields: Result> = fields + .iter() + .map(|f| { + let resolved_value = resolve_value_overrides(&f.value, name_table)?; + Ok(Field { + name: f.name.clone(), + value: resolved_value, + span: f.span.clone(), + }) + }) + .collect(); + Ok(Value::Object(resolved_fields?)) + }, + + // Other value types don't contain overrides + | _ => Ok(value.clone()), + } +} + +/// Check if applying the same override twice gives the same result +/// (idempotence) +pub fn is_idempotent(base: &[Field], ops: &[OverrideOp]) -> bool { + let result1 = apply_override(base.to_vec(), ops); + if result1.is_err() { + return false; + } + + let intermediate = result1.unwrap(); + let result2 = apply_override(intermediate.clone(), ops); + if result2.is_err() { + return false; + } + + // Should get the same result + intermediate == result2.unwrap() +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::syntax::ast::Span; + + fn make_field(name: &str, value: i64) -> Field { + Field { + name: name.to_string(), + value: Value::Int(value), + span: Span::new(0, 10), + } + } + + #[test] + fn test_set_replaces_existing_field() { + let base = vec![make_field("age", 25), make_field("health", 100)]; + + let ops = vec![OverrideOp::Set(make_field("age", 30))]; + + let result = apply_override(base, &ops).unwrap(); + assert_eq!(result.len(), 2); + + let age_field = result.iter().find(|f| f.name == "age").unwrap(); + assert_eq!(age_field.value, Value::Int(30)); + } + + #[test] + fn test_set_adds_new_field() { + let base = vec![make_field("age", 25)]; + + let ops = 
vec![OverrideOp::Set(make_field("health", 100))]; + + let result = apply_override(base, &ops).unwrap(); + assert_eq!(result.len(), 2); + assert!(result.iter().any(|f| f.name == "health")); + } + + #[test] + fn test_remove_deletes_field() { + let base = vec![ + make_field("age", 25), + make_field("health", 100), + make_field("energy", 50), + ]; + + let ops = vec![OverrideOp::Remove("health".to_string())]; + + let result = apply_override(base, &ops).unwrap(); + assert_eq!(result.len(), 2); + assert!(!result.iter().any(|f| f.name == "health")); + assert!(result.iter().any(|f| f.name == "age")); + assert!(result.iter().any(|f| f.name == "energy")); + } + + #[test] + fn test_remove_nonexistent_field_is_noop() { + let base = vec![make_field("age", 25)]; + + let ops = vec![OverrideOp::Remove("nonexistent".to_string())]; + + let result = apply_override(base, &ops).unwrap(); + assert_eq!(result.len(), 1); + assert_eq!(result[0].name, "age"); + } + + #[test] + fn test_append_adds_new_field() { + let base = vec![make_field("age", 25)]; + + let ops = vec![OverrideOp::Append(make_field("health", 100))]; + + let result = apply_override(base, &ops).unwrap(); + assert_eq!(result.len(), 2); + assert!(result.iter().any(|f| f.name == "health")); + } + + #[test] + fn test_append_existing_field_errors() { + let base = vec![make_field("age", 25)]; + + let ops = vec![OverrideOp::Append(make_field("age", 30))]; + + let result = apply_override(base, &ops); + assert!(result.is_err()); + } + + #[test] + fn test_multiple_operations() { + let base = vec![ + make_field("age", 25), + make_field("health", 100), + make_field("energy", 50), + ]; + + let ops = vec![ + OverrideOp::Set(make_field("age", 30)), + OverrideOp::Remove("energy".to_string()), + OverrideOp::Append(make_field("strength", 75)), + ]; + + let result = apply_override(base, &ops).unwrap(); + assert_eq!(result.len(), 3); + + let age = result.iter().find(|f| f.name == "age").unwrap(); + assert_eq!(age.value, Value::Int(30)); + + 
assert!(!result.iter().any(|f| f.name == "energy")); + assert!(result.iter().any(|f| f.name == "strength")); + } + + #[test] + fn test_set_is_idempotent() { + let base = vec![make_field("age", 25)]; + let ops = vec![OverrideOp::Set(make_field("age", 30))]; + + assert!(is_idempotent(&base, &ops)); + } + + #[test] + fn test_remove_is_idempotent() { + let base = vec![make_field("age", 25), make_field("health", 100)]; + let ops = vec![OverrideOp::Remove("health".to_string())]; + + assert!(is_idempotent(&base, &ops)); + } + + #[test] + fn test_append_is_not_idempotent() { + let base = vec![make_field("age", 25)]; + let ops = vec![OverrideOp::Append(make_field("health", 100))]; + + // Append is NOT idempotent because second application would try to + // append to a list that already has the field + assert!(!is_idempotent(&base, &ops)); + } + + // ===== Template Composition Tests ===== + + use crate::syntax::ast::File; + + fn make_file(declarations: Vec) -> File { + File { declarations } + } + + fn make_template( + name: &str, + fields: Vec, + includes: Vec<&str>, + strict: bool, + ) -> Template { + Template { + name: name.to_string(), + fields, + includes: includes.iter().map(|s| s.to_string()).collect(), + strict, + span: Span::new(0, 10), + } + } + + fn make_character(name: &str, fields: Vec, templates: Vec<&str>) -> Character { + Character { + name: name.to_string(), + fields, + template: if templates.is_empty() { + None + } else { + Some(templates.iter().map(|s| s.to_string()).collect()) + }, + span: Span::new(0, 10), + } + } + + #[test] + fn test_resolve_template_with_no_includes() { + let template = make_template("Person", vec![make_field("age", 25)], vec![], false); + let declarations = vec![Declaration::Template(template.clone())]; + let name_table = NameTable::from_file(&make_file(declarations.clone())).unwrap(); + let mut visited = HashSet::new(); + + let result = + resolve_template_includes(&template, &declarations, &name_table, &mut visited).unwrap(); + + 
assert_eq!(result.len(), 1); + assert_eq!(result[0].name, "age"); + assert_eq!(result[0].value, Value::Int(25)); + } + + #[test] + fn test_resolve_template_with_single_include() { + let base = make_template("Human", vec![make_field("age", 0)], vec![], false); + let derived = make_template("Person", vec![make_field("name", 0)], vec!["Human"], false); + + let declarations = vec![ + Declaration::Template(base), + Declaration::Template(derived.clone()), + ]; + let name_table = NameTable::from_file(&make_file(declarations.clone())).unwrap(); + let mut visited = HashSet::new(); + + let result = + resolve_template_includes(&derived, &declarations, &name_table, &mut visited).unwrap(); + + assert_eq!(result.len(), 2); + assert!(result.iter().any(|f| f.name == "age")); + assert!(result.iter().any(|f| f.name == "name")); + } + + #[test] + fn test_resolve_template_with_chained_includes() { + let base = make_template("Being", vec![make_field("alive", 1)], vec![], false); + let middle = make_template("Human", vec![make_field("age", 0)], vec!["Being"], false); + let top = make_template("Person", vec![make_field("name", 0)], vec!["Human"], false); + + let declarations = vec![ + Declaration::Template(base), + Declaration::Template(middle), + Declaration::Template(top.clone()), + ]; + let name_table = NameTable::from_file(&make_file(declarations.clone())).unwrap(); + let mut visited = HashSet::new(); + + let result = + resolve_template_includes(&top, &declarations, &name_table, &mut visited).unwrap(); + + assert_eq!(result.len(), 3); + assert!(result.iter().any(|f| f.name == "alive")); + assert!(result.iter().any(|f| f.name == "age")); + assert!(result.iter().any(|f| f.name == "name")); + } + + #[test] + fn test_resolve_template_field_override() { + let base = make_template("Human", vec![make_field("age", 0)], vec![], false); + let derived = make_template( + "Person", + vec![make_field("age", 25)], // Override with concrete value + vec!["Human"], + false, + ); + + let declarations = 
vec![ + Declaration::Template(base), + Declaration::Template(derived.clone()), + ]; + let name_table = NameTable::from_file(&make_file(declarations.clone())).unwrap(); + let mut visited = HashSet::new(); + + let result = + resolve_template_includes(&derived, &declarations, &name_table, &mut visited).unwrap(); + + assert_eq!(result.len(), 1); + assert_eq!(result[0].name, "age"); + assert_eq!(result[0].value, Value::Int(25)); // Should be overridden + // value + } + + #[test] + fn test_merge_character_templates_single() { + let template = make_template("Person", vec![make_field("age", 0)], vec![], false); + let character = make_character("Martha", vec![make_field("age", 34)], vec!["Person"]); + + let declarations = vec![ + Declaration::Template(template), + Declaration::Character(character.clone()), + ]; + let name_table = NameTable::from_file(&make_file(declarations.clone())).unwrap(); + + let result = merge_character_templates(&character, &declarations, &name_table).unwrap(); + + assert_eq!(result.len(), 1); + assert_eq!(result[0].name, "age"); + assert_eq!(result[0].value, Value::Int(34)); // Character's value + // overrides template + } + + #[test] + fn test_merge_character_templates_multiple() { + let physical = make_template("Physical", vec![make_field("height", 0)], vec![], false); + let mental = make_template("Mental", vec![make_field("iq", 0)], vec![], false); + let character = make_character( + "Martha", + vec![make_field("height", 165), make_field("iq", 120)], + vec!["Physical", "Mental"], + ); + + let declarations = vec![ + Declaration::Template(physical), + Declaration::Template(mental), + Declaration::Character(character.clone()), + ]; + let name_table = NameTable::from_file(&make_file(declarations.clone())).unwrap(); + + let result = merge_character_templates(&character, &declarations, &name_table).unwrap(); + + assert_eq!(result.len(), 2); + assert!(result + .iter() + .any(|f| f.name == "height" && f.value == Value::Int(165))); + assert!(result + 
.iter() + .any(|f| f.name == "iq" && f.value == Value::Int(120))); + } + + #[test] + fn test_merge_character_templates_with_includes() { + let base = make_template("Human", vec![make_field("age", 0)], vec![], false); + let derived = make_template("Person", vec![make_field("name", 0)], vec!["Human"], false); + let character = make_character( + "Martha", + vec![make_field("age", 34), make_field("name", 1)], + vec!["Person"], + ); + + let declarations = vec![ + Declaration::Template(base), + Declaration::Template(derived), + Declaration::Character(character.clone()), + ]; + let name_table = NameTable::from_file(&make_file(declarations.clone())).unwrap(); + + let result = merge_character_templates(&character, &declarations, &name_table).unwrap(); + + assert_eq!(result.len(), 2); + assert!(result + .iter() + .any(|f| f.name == "age" && f.value == Value::Int(34))); + assert!(result + .iter() + .any(|f| f.name == "name" && f.value == Value::Int(1))); + } + + #[test] + fn test_strict_template_validation_passes() { + let template = make_template("Person", vec![make_field("age", 0)], vec![], true); + let character = make_character("Martha", vec![make_field("age", 34)], vec!["Person"]); + + let declarations = vec![ + Declaration::Template(template), + Declaration::Character(character.clone()), + ]; + let name_table = NameTable::from_file(&make_file(declarations.clone())).unwrap(); + + let result = merge_character_templates(&character, &declarations, &name_table); + assert!(result.is_ok()); + } + + #[test] + fn test_strict_template_validation_fails_with_range() { + let template = make_template( + "Person", + vec![Field { + name: "age".to_string(), + value: Value::Range(Box::new(Value::Int(18)), Box::new(Value::Int(65))), + span: Span::new(0, 10), + }], + vec![], + true, + ); + let character = make_character("Martha", vec![], vec!["Person"]); + + let declarations = vec![ + Declaration::Template(template), + Declaration::Character(character.clone()), + ]; + let name_table = 
NameTable::from_file(&make_file(declarations.clone())).unwrap(); + + let result = merge_character_templates(&character, &declarations, &name_table); + assert!(result.is_err()); + if let Err(ResolveError::ValidationError { message, .. }) = result { + assert!(message.contains("strict template")); + assert!(message.contains("range value")); + } + } + + #[test] + fn test_circular_include_detection() { + let a = make_template("A", vec![], vec!["B"], false); + let b = make_template("B", vec![], vec!["A"], false); + + let declarations = vec![Declaration::Template(a.clone()), Declaration::Template(b)]; + let name_table = NameTable::from_file(&make_file(declarations.clone())).unwrap(); + let mut visited = HashSet::new(); + + let result = resolve_template_includes(&a, &declarations, &name_table, &mut visited); + assert!(result.is_err()); + if let Err(ResolveError::CircularDependency { .. }) = result { + // Expected + } else { + panic!("Expected CircularDependency error"); + } + } + + #[test] + fn test_merge_field_lists_override() { + let base = vec![make_field("age", 25), make_field("health", 100)]; + let overrides = vec![make_field("age", 30)]; + + let result = merge_field_lists(base, overrides); + + assert_eq!(result.len(), 2); + let age = result.iter().find(|f| f.name == "age").unwrap(); + assert_eq!(age.value, Value::Int(30)); + } + + #[test] + fn test_merge_field_lists_add_new() { + let base = vec![make_field("age", 25)]; + let overrides = vec![make_field("health", 100)]; + + let result = merge_field_lists(base, overrides); + + assert_eq!(result.len(), 2); + assert!(result.iter().any(|f| f.name == "age")); + assert!(result.iter().any(|f| f.name == "health")); + } +} diff --git a/src/resolve/merge_prop_tests.rs b/src/resolve/merge_prop_tests.rs new file mode 100644 index 0000000..e279470 --- /dev/null +++ b/src/resolve/merge_prop_tests.rs @@ -0,0 +1,338 @@ +//! 
Property tests for override merge engine + +use proptest::prelude::*; + +use crate::{ + resolve::merge::{ + apply_override, + is_idempotent, + }, + syntax::ast::*, +}; + +// ===== Generators ===== + +fn valid_ident() -> impl Strategy { + "[a-zA-Z_][a-zA-Z0-9_]{0,15}" +} + +fn valid_field() -> impl Strategy { + (valid_ident(), 0i64..1000).prop_map(|(name, value)| Field { + name, + value: Value::Int(value), + span: Span::new(0, 10), + }) +} + +fn valid_field_list() -> impl Strategy> { + prop::collection::vec(valid_field(), 0..10) + // Ensure unique field names + .prop_map(|fields| { + let mut unique_fields = Vec::new(); + let mut seen_names = std::collections::HashSet::new(); + + for field in fields { + if seen_names.insert(field.name.clone()) { + unique_fields.push(field); + } + } + unique_fields + }) +} + +fn valid_set_op() -> impl Strategy { + valid_field().prop_map(OverrideOp::Set) +} + +fn valid_remove_op() -> impl Strategy { + valid_ident().prop_map(OverrideOp::Remove) +} + +fn valid_append_op() -> impl Strategy { + valid_field().prop_map(OverrideOp::Append) +} + +fn valid_override_ops() -> impl Strategy> { + prop::collection::vec( + prop_oneof![valid_set_op(), valid_remove_op(), valid_append_op(),], + 0..10, + ) +} + +// ===== Property Tests ===== + +proptest! 
{ + #[test] + fn test_empty_override_returns_base(base in valid_field_list()) { + let ops: Vec = vec![]; + let result = apply_override(base.clone(), &ops).unwrap(); + + assert_eq!(result, base); + } + + #[test] + fn test_override_never_panics( + base in valid_field_list(), + ops in valid_override_ops() + ) { + // Should never panic, might return error + let _ = apply_override(base, &ops); + } + + #[test] + fn test_set_always_succeeds( + base in valid_field_list(), + field in valid_field() + ) { + let ops = vec![OverrideOp::Set(field)]; + let result = apply_override(base, &ops); + + assert!(result.is_ok(), "Set operations should always succeed"); + } + + #[test] + fn test_remove_always_succeeds( + base in valid_field_list(), + name in valid_ident() + ) { + let ops = vec![OverrideOp::Remove(name)]; + let result = apply_override(base, &ops); + + assert!(result.is_ok(), "Remove operations should always succeed"); + } + + #[test] + fn test_set_adds_or_replaces( + base in valid_field_list(), + field in valid_field() + ) { + let ops = vec![OverrideOp::Set(field.clone())]; + let result = apply_override(base.clone(), &ops).unwrap(); + + // Result should contain the field + assert!(result.iter().any(|f| f.name == field.name)); + + // If field was in base, length should be same + // If field was not in base, length should increase by 1 + let was_in_base = base.iter().any(|f| f.name == field.name); + if was_in_base { + assert_eq!(result.len(), base.len()); + } else { + assert_eq!(result.len(), base.len() + 1); + } + } + + #[test] + fn test_remove_decreases_or_maintains_length( + base in valid_field_list(), + name in valid_ident() + ) { + let ops = vec![OverrideOp::Remove(name.clone())]; + let result = apply_override(base.clone(), &ops).unwrap(); + + // Result should not contain the field + assert!(!result.iter().any(|f| f.name == name)); + + // Length should be <= base length + assert!(result.len() <= base.len()); + } + + #[test] + fn test_append_to_empty_succeeds(field in 
valid_field()) { + let base = vec![]; + let ops = vec![OverrideOp::Append(field.clone())]; + let result = apply_override(base, &ops).unwrap(); + + assert_eq!(result.len(), 1); + assert_eq!(result[0].name, field.name); + } + + #[test] + fn test_append_duplicate_fails( + base in valid_field_list().prop_filter("non-empty", |f| !f.is_empty()) + ) { + // Try to append a field that already exists + let existing_field = base[0].clone(); + let ops = vec![OverrideOp::Append(existing_field)]; + let result = apply_override(base, &ops); + + assert!(result.is_err(), "Appending duplicate field should fail"); + } + + #[test] + fn test_set_is_idempotent_property( + base in valid_field_list(), + field in valid_field() + ) { + let ops = vec![OverrideOp::Set(field)]; + assert!(is_idempotent(&base, &ops)); + } + + #[test] + fn test_remove_is_idempotent_property( + base in valid_field_list(), + name in valid_ident() + ) { + let ops = vec![OverrideOp::Remove(name)]; + assert!(is_idempotent(&base, &ops)); + } + + #[test] + fn test_multiple_sets_last_wins( + base in valid_field_list(), + name in valid_ident(), + val1 in 0i64..100, + val2 in 100i64..200 + ) { + let field1 = Field { + name: name.clone(), + value: Value::Int(val1), + span: Span::new(0, 10), + }; + let field2 = Field { + name: name.clone(), + value: Value::Int(val2), + span: Span::new(0, 10), + }; + + let ops = vec![ + OverrideOp::Set(field1), + OverrideOp::Set(field2.clone()), + ]; + + let result = apply_override(base, &ops).unwrap(); + let final_field = result.iter().find(|f| f.name == name).unwrap(); + + // Last set should win + assert_eq!(final_field.value, field2.value); + } + + #[test] + fn test_set_after_remove_adds_back( + base in valid_field_list().prop_filter("non-empty", |f| !f.is_empty()) + ) { + let field = base[0].clone(); + let name = field.name.clone(); + + let ops = vec![ + OverrideOp::Remove(name.clone()), + OverrideOp::Set(field.clone()), + ]; + + let result = apply_override(base, &ops).unwrap(); + + // 
Field should exist after remove + set + assert!(result.iter().any(|f| f.name == name)); + } + + #[test] + fn test_remove_after_set_cancels_out( + base in valid_field_list(), + field in valid_field() + ) { + let name = field.name.clone(); + let ops = vec![ + OverrideOp::Set(field), + OverrideOp::Remove(name.clone()), + ]; + + let result = apply_override(base, &ops).unwrap(); + + // Field should not exist after set + remove + assert!(!result.iter().any(|f| f.name == name)); + } + + #[test] + fn test_operation_order_matters( + base in valid_field_list(), + name in valid_ident(), + val1 in 0i64..100, + val2 in 100i64..200 + ) { + let field1 = Field { + name: name.clone(), + value: Value::Int(val1), + span: Span::new(0, 10), + }; + let field2 = Field { + name: name.clone(), + value: Value::Int(val2), + span: Span::new(0, 10), + }; + + let ops1 = vec![ + OverrideOp::Set(field1.clone()), + OverrideOp::Set(field2.clone()), + ]; + + let ops2 = vec![ + OverrideOp::Set(field2.clone()), + OverrideOp::Set(field1.clone()), + ]; + + let result1 = apply_override(base.clone(), &ops1).unwrap(); + let result2 = apply_override(base, &ops2).unwrap(); + + // Different order should give different results (last set wins) + let value1 = result1.iter().find(|f| f.name == name).unwrap().value.clone(); + let value2 = result2.iter().find(|f| f.name == name).unwrap().value.clone(); + + assert_eq!(value1, Value::Int(val2)); + assert_eq!(value2, Value::Int(val1)); + } + + #[test] + fn test_append_after_remove_succeeds( + base in valid_field_list().prop_filter("non-empty", |f| !f.is_empty()) + ) { + let field = base[0].clone(); + let name = field.name.clone(); + + let ops = vec![ + OverrideOp::Remove(name.clone()), + OverrideOp::Append(field.clone()), + ]; + + let result = apply_override(base, &ops); + + // Should succeed - field was removed then appended + assert!(result.is_ok()); + let fields = result.unwrap(); + assert!(fields.iter().any(|f| f.name == name)); + } + + #[test] + fn 
test_associativity_for_sets( + base in valid_field_list(), + f1 in valid_field(), + f2 in valid_field(), + f3 in valid_field() + ) { + // Skip if any fields have the same name + if f1.name == f2.name || f2.name == f3.name || f1.name == f3.name { + return Ok(()); + } + + // (f1, f2), f3 + let ops1 = vec![ + OverrideOp::Set(f1.clone()), + OverrideOp::Set(f2.clone()), + ]; + let intermediate = apply_override(base.clone(), &ops1).unwrap(); + let result1 = apply_override(intermediate, &[OverrideOp::Set(f3.clone())]).unwrap(); + + // f1, (f2, f3) + let ops2 = vec![ + OverrideOp::Set(f2.clone()), + OverrideOp::Set(f3.clone()), + ]; + let intermediate = apply_override(base.clone(), &ops2).unwrap(); + let result2 = apply_override(intermediate, &[OverrideOp::Set(f1.clone())]).unwrap(); + + // Results might differ due to field order, but should have same fields + assert_eq!(result1.len(), result2.len()); + for field in &result1 { + assert!(result2.iter().any(|f| f.name == field.name)); + } + } +} diff --git a/src/resolve/mod.rs b/src/resolve/mod.rs new file mode 100644 index 0000000..3852170 --- /dev/null +++ b/src/resolve/mod.rs @@ -0,0 +1,228 @@ +//! Resolution engine for Storybook DSL +//! +//! This module takes parsed AST and: +//! - Builds a name table mapping qualified paths to declarations +//! - Resolves cross-references between entities +//! - Handles `use` statements +//! - Validates semantic constraints +//! 
- Produces rich error diagnostics +#![allow(unused_assignments)] // False positives in error enum fields used by thiserror + +pub mod convert; +pub mod links; +pub mod merge; +pub mod names; +pub mod types; +pub mod validate; + +#[cfg(test)] +mod prop_tests; + +#[cfg(test)] +mod integration_tests; + +#[cfg(test)] +#[path = "links_prop_tests.rs"] +mod links_prop_tests; + +#[cfg(test)] +#[path = "merge_prop_tests.rs"] +mod merge_prop_tests; + +#[cfg(test)] +#[path = "validate_prop_tests.rs"] +mod validate_prop_tests; + +#[cfg(test)] +#[path = "convert_prop_tests.rs"] +mod convert_prop_tests; + +#[cfg(test)] +#[path = "convert_integration_tests.rs"] +mod convert_integration_tests; + +use miette::Diagnostic; +pub use names::{ + NameTable, + QualifiedPath, +}; +use thiserror::Error; +pub use types::ResolvedFile; + +use crate::syntax::ast::File; + +/// Errors that can occur during resolution +#[derive(Error, Debug, Diagnostic)] +pub enum ResolveError { + #[error("Name '{name}' not found")] + NameNotFound { + #[allow(dead_code)] + #[allow(unused_assignments)] + name: String, + #[help] + suggestion: Option, + }, + + #[error("Duplicate definition of '{name}'")] + #[diagnostic(help("Each name must be unique within a file. You've defined '{name}' more than once. Either rename one of them or remove the duplicate definition. If you meant to extend or override the first definition, use template inheritance instead."))] + DuplicateDefinition { + #[allow(dead_code)] + #[allow(unused_assignments)] + name: String, + #[label("first defined here")] + first_location: (usize, usize), + #[label("redefined here")] + second_location: (usize, usize), + }, + + #[error("Circular dependency detected")] + CircularDependency { + #[help] + cycle: String, + }, + + #[error("Invalid field access: {message}")] + #[diagnostic(help("You're trying to access a field that doesn't exist or isn't accessible in this context. 
Check the field name for typos and make sure it's defined on the object you're accessing. In relationships, use 'self' for your own fields and 'other' for the other participant's fields."))] + InvalidFieldAccess { message: String }, + + #[error("Type mismatch: {message}")] + #[diagnostic(help("The value you provided doesn't match the expected type. For example, you might be using a number where text is expected, or vice versa. Check that your value matches what's expected: numbers for ages, decimals (0.0-1.0) for traits, strings in quotes for text, true/false for yes/no values."))] + TypeMismatch { message: String }, + + #[error("Validation error: {message}")] + ValidationError { + message: String, + #[help] + help: Option, + }, + + #[error("Life arc '{life_arc}': state '{state}' has transition to unknown state '{target}'")] + #[diagnostic(help("Make sure the target state '{target}' is defined in the same life_arc block. Available states: {available_states}"))] + UnknownLifeArcState { + life_arc: String, + state: String, + target: String, + available_states: String, + }, + + #[error("Trait '{field}' has value {value} which is outside valid range [{min}, {max}]")] + #[diagnostic(help("Normalized traits like '{field}' must have values between {min} and {max}. Try adjusting the value to fit within this range."))] + TraitOutOfRange { + field: String, + value: String, + min: f64, + max: f64, + }, + + #[error("Schedule blocks '{block1}' and '{block2}' overlap")] + #[diagnostic(help("Schedule blocks cannot overlap in time. Make sure each block ends before the next one begins. Block '{block1}' ends at {end1}, but '{block2}' starts at {start2}."))] + ScheduleOverlap { + block1: String, + block2: String, + end1: String, + start2: String, + }, + + #[error("Behavior tree '{tree}': unknown action '{action}'")] + #[diagnostic(help("The action '{action}' is not defined in the action registry. 
Make sure to define all custom actions, or check for typos."))] + UnknownBehaviorAction { tree: String, action: String }, + + #[error("Found {count} errors")] + #[diagnostic(help("Multiple errors were found during validation. Each error is listed below with its own help message. Fix them one by one, or use the hints to address common patterns."))] + MultipleErrors { + count: usize, + #[related] + errors: Vec, + }, +} + +/// Result type for resolution operations +pub type Result = std::result::Result; + +/// Collects multiple errors instead of failing fast +/// +/// This allows the compiler to continue validating and report all errors at +/// once, giving users a complete picture of what needs to be fixed. +#[derive(Debug, Default)] +pub struct ErrorCollector { + errors: Vec, +} + +impl ErrorCollector { + /// Create a new error collector + pub fn new() -> Self { + Self { errors: Vec::new() } + } + + /// Add an error to the collection + pub fn add(&mut self, error: ResolveError) { + // Don't add MultipleErrors to avoid nesting + match error { + | ResolveError::MultipleErrors { mut errors, .. 
} => { + self.errors.append(&mut errors); + }, + | _ => { + self.errors.push(error); + }, + } + } + + /// Check if any errors were collected + pub fn has_errors(&self) -> bool { + !self.errors.is_empty() + } + + /// Get the number of errors collected + pub fn len(&self) -> usize { + self.errors.len() + } + + /// Check if no errors have been collected + pub fn is_empty(&self) -> bool { + self.errors.is_empty() + } + + /// Convert to a Result, returning Ok if no errors or Err with all errors + pub fn into_result(self, ok_value: T) -> Result { + if self.errors.is_empty() { + Ok(ok_value) + } else if self.errors.len() == 1 { + // Single error - return it directly + Err(self.errors.into_iter().next().unwrap()) + } else { + // Multiple errors - wrap them + Err(ResolveError::MultipleErrors { + count: self.errors.len(), + errors: self.errors, + }) + } + } + + /// Add a Result to the collector, extracting any error + pub fn add_result(&mut self, result: Result) -> Option { + match result { + | Ok(value) => Some(value), + | Err(e) => { + self.add(e); + None + }, + } + } +} + +/// Resolve a parsed file into a validated, cross-referenced structure +pub fn resolve_file(file: File) -> Result { + // Phase 1: Build name table + let _name_table = NameTable::from_file(&file)?; + + // Phase 2: Resolve cross-references + // TODO: implement + + // Phase 3: Validate semantics + // TODO: implement + + // For now, just return a placeholder + Ok(ResolvedFile { + declarations: vec![], + }) +} diff --git a/src/resolve/names.rs b/src/resolve/names.rs new file mode 100644 index 0000000..97abfb0 --- /dev/null +++ b/src/resolve/names.rs @@ -0,0 +1,581 @@ +//! Name resolution and qualified paths +//! +//! This module handles: +//! - Building a name table from parsed AST +//! - Mapping qualified paths to declarations +//! - Resolving `use` statements +//! 
- Fuzzy matching for suggestions + +use indexmap::IndexMap; +use strsim::jaro_winkler; + +use crate::{ + resolve::{ + ResolveError, + Result, + }, + syntax::ast::{ + Declaration, + File, + Span, + }, +}; + +/// A qualified path like `characters::Martha` or `behaviors::WorkAtBakery` +pub type QualifiedPath = Vec; + +/// Kind of declaration +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum DeclKind { + Character, + Template, + LifeArc, + Schedule, + Behavior, + Institution, + Relationship, + Location, + Species, + Enum, +} + +/// Entry in the name table +#[derive(Debug, Clone)] +pub struct NameEntry { + pub kind: DeclKind, + pub qualified_path: QualifiedPath, + pub span: Span, + /// Index into the original declarations vector + pub decl_index: usize, +} + +/// Name table mapping qualified paths to declarations +#[derive(Debug, Clone)] +pub struct NameTable { + /// Map from qualified path to entry + entries: IndexMap, + /// Import statements (source path -> imported names) + imports: Vec, +} + +/// An import statement +#[derive(Debug, Clone)] +pub struct Import { + pub source_path: QualifiedPath, + pub kind: ImportKind, +} + +#[derive(Debug, Clone)] +pub enum ImportKind { + /// use foo::bar + Single(String), + /// use foo::{bar, baz} + Grouped(Vec), + /// use foo::* + Wildcard, +} + +impl NameTable { + /// Create a new empty name table + pub fn new() -> Self { + Self { + entries: IndexMap::new(), + imports: Vec::new(), + } + } + + /// Build a name table from a parsed file + pub fn from_file(file: &File) -> Result { + let mut table = Self::new(); + + // First pass: collect imports + for decl in &file.declarations { + if let Declaration::Use(use_decl) = decl { + let import = Import { + source_path: use_decl.path.clone(), + kind: match &use_decl.kind { + | crate::syntax::ast::UseKind::Single => { + // For single import, the last component is the name + let name = use_decl + .path + .last() + .ok_or_else(|| ResolveError::ValidationError { + message: "Empty use 
path".to_string(), + help: Some("Use statements must have at least one path component. For example: 'use characters::Martha;'".to_string()), + })? + .clone(); + ImportKind::Single(name) + }, + | crate::syntax::ast::UseKind::Grouped(names) => { + ImportKind::Grouped(names.clone()) + }, + | crate::syntax::ast::UseKind::Wildcard => ImportKind::Wildcard, + }, + }; + table.imports.push(import); + } + } + + // Second pass: register declarations + for (index, decl) in file.declarations.iter().enumerate() { + let (name, kind, span) = match decl { + | Declaration::Use(_) => continue, + | Declaration::Character(c) => { + (c.name.clone(), DeclKind::Character, c.span.clone()) + }, + | Declaration::Template(t) => (t.name.clone(), DeclKind::Template, t.span.clone()), + | Declaration::LifeArc(l) => (l.name.clone(), DeclKind::LifeArc, l.span.clone()), + | Declaration::Schedule(s) => (s.name.clone(), DeclKind::Schedule, s.span.clone()), + | Declaration::Behavior(b) => (b.name.clone(), DeclKind::Behavior, b.span.clone()), + | Declaration::Institution(i) => { + (i.name.clone(), DeclKind::Institution, i.span.clone()) + }, + | Declaration::Relationship(r) => { + (r.name.clone(), DeclKind::Relationship, r.span.clone()) + }, + | Declaration::Location(l) => (l.name.clone(), DeclKind::Location, l.span.clone()), + | Declaration::Species(s) => (s.name.clone(), DeclKind::Species, s.span.clone()), + | Declaration::Enum(e) => (e.name.clone(), DeclKind::Enum, e.span.clone()), + }; + + // For now, qualified path is just the name + // In a multi-file system, this would include directory structure + let qualified_path = vec![name.clone()]; + + // Check for duplicates + if let Some(existing) = table.entries.get(&qualified_path) { + return Err(ResolveError::DuplicateDefinition { + name, + first_location: (existing.span.start, existing.span.end), + second_location: (span.start, span.end), + }); + } + + table.entries.insert( + qualified_path.clone(), + NameEntry { + kind, + qualified_path, + span, + 
decl_index: index, + }, + ); + } + + Ok(table) + } + + /// Look up a name in the table + pub fn lookup(&self, path: &[String]) -> Option<&NameEntry> { + self.entries.get(path) + } + + /// Find the best fuzzy match for a name + pub fn find_suggestion(&self, name: &str) -> Option { + let mut best_match: Option<(String, f64)> = None; + + for path in self.entries.keys() { + // Compare against the last component of the path + let component = path.last()?; + let score = jaro_winkler(name, component); + + if score > 0.8 { + match &best_match { + | None => best_match = Some((component.clone(), score)), + | Some((_, best_score)) if score > *best_score => { + best_match = Some((component.clone(), score)); + }, + | _ => {}, + } + } + } + + best_match.map(|(name, _)| name) + } + + /// Get all entries of a specific kind + pub fn entries_of_kind(&self, kind: DeclKind) -> impl Iterator { + self.entries + .values() + .filter(move |entry| entry.kind == kind) + } + + /// Get all entries + pub fn all_entries(&self) -> impl Iterator { + self.entries.values() + } + + /// Resolve a name, considering imports + /// + /// This tries to resolve a simple name like "Martha" by: + /// 1. Looking in the local table + /// 2. Checking if it's explicitly imported + /// 3. 
Checking if it matches a wildcard import + pub fn resolve_name(&self, name: &str) -> Option<&NameEntry> { + // First try direct lookup (fully qualified or local) + if let Some(entry) = self.lookup(&[name.to_string()]) { + return Some(entry); + } + + // Then try imports + for import in &self.imports { + match &import.kind { + | ImportKind::Single(imported_name) if imported_name == name => { + // Found in single import + return self.lookup(&import.source_path); + }, + | ImportKind::Grouped(names) if names.contains(&name.to_string()) => { + // Found in grouped import + let mut path = import.source_path.clone(); + path.push(name.to_string()); + return self.lookup(&path); + }, + | ImportKind::Wildcard => { + // Try appending the name to the import path + let mut path = import.source_path.clone(); + path.push(name.to_string()); + if let Some(entry) = self.lookup(&path) { + return Some(entry); + } + }, + | _ => {}, + } + } + + None + } + + /// Get all imports + pub fn imports(&self) -> &[Import] { + &self.imports + } + + /// Merge another name table into this one + /// + /// This is used to combine name tables from multiple files into a single + /// project-wide table. Returns an error if there are duplicate definitions. 
+ pub fn merge(&mut self, other: NameTable) -> Result<()> { + // Merge imports + self.imports.extend(other.imports); + + // Merge entries, checking for duplicates + for (path, entry) in other.entries { + if let Some(existing) = self.entries.get(&path) { + // Found a duplicate - extract the name from the path + let name = path.last().unwrap_or(&String::new()).clone(); + return Err(ResolveError::DuplicateDefinition { + name, + first_location: (existing.span.start, existing.span.end), + second_location: (entry.span.start, entry.span.end), + }); + } + self.entries.insert(path, entry); + } + + Ok(()) + } + + /// Build a combined name table from multiple files + /// + /// This creates name tables for each file and merges them together, + /// detecting any duplicate definitions across files. + pub fn from_files(files: &[File]) -> Result { + let mut combined = NameTable::new(); + + for file in files { + let table = NameTable::from_file(file)?; + combined.merge(table)?; + } + + Ok(combined) + } +} + +impl Default for NameTable { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::syntax::ast::{ + Character, + Template, + }; + + #[test] + fn test_name_table_basic() { + let file = File { + declarations: vec![ + Declaration::Character(Character { + name: "Martha".to_string(), + fields: vec![], + template: None, + + span: Span::new(0, 10), + }), + Declaration::Template(Template { + name: "Person".to_string(), + fields: vec![], + strict: false, + includes: vec![], + span: Span::new(20, 30), + }), + ], + }; + + let table = NameTable::from_file(&file).unwrap(); + + assert!(table.lookup(&["Martha".to_string()]).is_some()); + assert!(table.lookup(&["Person".to_string()]).is_some()); + assert!(table.lookup(&["Unknown".to_string()]).is_none()); + } + + #[test] + fn test_duplicate_detection() { + let file = File { + declarations: vec![ + Declaration::Character(Character { + name: "Martha".to_string(), + fields: vec![], + template: 
None, + + span: Span::new(0, 10), + }), + Declaration::Character(Character { + name: "Martha".to_string(), + fields: vec![], + template: None, + + span: Span::new(20, 30), + }), + ], + }; + + let result = NameTable::from_file(&file); + assert!(result.is_err()); + } + + #[test] + fn test_fuzzy_matching() { + let file = File { + declarations: vec![Declaration::Character(Character { + name: "Martha".to_string(), + fields: vec![], + template: None, + + span: Span::new(0, 10), + })], + }; + + let table = NameTable::from_file(&file).unwrap(); + + // "Marhta" (typo) should suggest "Martha" + let suggestion = table.find_suggestion("Marhta"); + assert_eq!(suggestion, Some("Martha".to_string())); + } + + #[test] + fn test_single_import() { + use crate::syntax::ast::{ + UseDecl, + UseKind, + }; + + let file = File { + declarations: vec![ + Declaration::Use(UseDecl { + path: vec!["characters".to_string(), "Martha".to_string()], + kind: UseKind::Single, + span: Span::new(0, 10), + }), + Declaration::Character(Character { + name: "characters".to_string(), + fields: vec![], + template: None, + + span: Span::new(20, 30), + }), + ], + }; + + let table = NameTable::from_file(&file).unwrap(); + assert_eq!(table.imports.len(), 1); + + match &table.imports[0].kind { + | ImportKind::Single(name) => assert_eq!(name, "Martha"), + | _ => panic!("Expected single import"), + } + } + + #[test] + fn test_grouped_import() { + use crate::syntax::ast::{ + UseDecl, + UseKind, + }; + + let file = File { + declarations: vec![Declaration::Use(UseDecl { + path: vec!["characters".to_string()], + kind: UseKind::Grouped(vec!["Martha".to_string(), "David".to_string()]), + span: Span::new(0, 10), + })], + }; + + let table = NameTable::from_file(&file).unwrap(); + assert_eq!(table.imports.len(), 1); + + match &table.imports[0].kind { + | ImportKind::Grouped(names) => { + assert_eq!(names.len(), 2); + assert!(names.contains(&"Martha".to_string())); + assert!(names.contains(&"David".to_string())); + }, + | _ 
=> panic!("Expected grouped import"), + } + } + + #[test] + fn test_wildcard_import() { + use crate::syntax::ast::{ + UseDecl, + UseKind, + }; + + let file = File { + declarations: vec![Declaration::Use(UseDecl { + path: vec!["characters".to_string()], + kind: UseKind::Wildcard, + span: Span::new(0, 10), + })], + }; + + let table = NameTable::from_file(&file).unwrap(); + assert_eq!(table.imports.len(), 1); + + match &table.imports[0].kind { + | ImportKind::Wildcard => {}, + | _ => panic!("Expected wildcard import"), + } + } + + #[test] + fn test_merge_tables() { + let file1 = File { + declarations: vec![Declaration::Character(Character { + name: "Martha".to_string(), + fields: vec![], + template: None, + + span: Span::new(0, 10), + })], + }; + + let file2 = File { + declarations: vec![Declaration::Character(Character { + name: "David".to_string(), + fields: vec![], + template: None, + + span: Span::new(0, 10), + })], + }; + + let mut table1 = NameTable::from_file(&file1).unwrap(); + let table2 = NameTable::from_file(&file2).unwrap(); + + table1.merge(table2).unwrap(); + + // Both names should be in the merged table + assert!(table1.lookup(&["Martha".to_string()]).is_some()); + assert!(table1.lookup(&["David".to_string()]).is_some()); + } + + #[test] + fn test_merge_detects_duplicates() { + let file1 = File { + declarations: vec![Declaration::Character(Character { + name: "Martha".to_string(), + fields: vec![], + template: None, + + span: Span::new(0, 10), + })], + }; + + let file2 = File { + declarations: vec![Declaration::Character(Character { + name: "Martha".to_string(), + fields: vec![], + template: None, + + span: Span::new(20, 30), + })], + }; + + let mut table1 = NameTable::from_file(&file1).unwrap(); + let table2 = NameTable::from_file(&file2).unwrap(); + + let result = table1.merge(table2); + assert!(result.is_err()); + } + + #[test] + fn test_from_files() { + let files = vec![ + File { + declarations: vec![Declaration::Character(Character { + name: 
"Martha".to_string(), + fields: vec![], + template: None, + + span: Span::new(0, 10), + })], + }, + File { + declarations: vec![Declaration::Character(Character { + name: "David".to_string(), + fields: vec![], + template: None, + + span: Span::new(0, 10), + })], + }, + ]; + + let table = NameTable::from_files(&files).unwrap(); + + assert!(table.lookup(&["Martha".to_string()]).is_some()); + assert!(table.lookup(&["David".to_string()]).is_some()); + } + + #[test] + fn test_from_files_detects_duplicates() { + let files = vec![ + File { + declarations: vec![Declaration::Character(Character { + name: "Martha".to_string(), + fields: vec![], + template: None, + + span: Span::new(0, 10), + })], + }, + File { + declarations: vec![Declaration::Character(Character { + name: "Martha".to_string(), + fields: vec![], + template: None, + + span: Span::new(20, 30), + })], + }, + ]; + + let result = NameTable::from_files(&files); + assert!(result.is_err()); + } +} diff --git a/src/resolve/prop_tests.rs b/src/resolve/prop_tests.rs new file mode 100644 index 0000000..252fb24 --- /dev/null +++ b/src/resolve/prop_tests.rs @@ -0,0 +1,303 @@ +//! 
Property-based tests for the resolution engine + +use proptest::prelude::*; + +use crate::{ + resolve::names::{ + DeclKind, + NameTable, + }, + syntax::ast::*, +}; + +// ===== Generators ===== + +fn valid_ident() -> impl Strategy { + "[a-zA-Z_][a-zA-Z0-9_]{0,20}".prop_filter("not a keyword", |s| { + !matches!( + s.as_str(), + "use" | + "character" | + "template" | + "life_arc" | + "schedule" | + "behavior" | + "institution" | + "relationship" | + "location" | + "species" | + "enum" | + "state" | + "on" | + "as" | + "self" | + "other" | + "remove" | + "append" | + "forall" | + "exists" | + "in" | + "where" | + "and" | + "or" | + "not" | + "is" | + "true" | + "false" + ) + }) +} + +fn valid_character_decl() -> impl Strategy { + valid_ident().prop_map(|name| { + let decl = Declaration::Character(Character { + name: name.clone(), + fields: vec![], + template: None, + + span: Span::new(0, 10), + }); + (name, decl) + }) +} + +fn valid_template_decl() -> impl Strategy { + valid_ident().prop_map(|name| { + let decl = Declaration::Template(Template { + name: name.clone(), + fields: vec![], + strict: false, + includes: vec![], + span: Span::new(0, 10), + }); + (name, decl) + }) +} + +fn valid_enum_decl() -> impl Strategy { + (valid_ident(), prop::collection::vec(valid_ident(), 1..5)).prop_map(|(name, variants)| { + let decl = Declaration::Enum(EnumDecl { + name: name.clone(), + variants, + span: Span::new(0, 10), + }); + (name, decl) + }) +} + +fn valid_use_single() -> impl Strategy { + (valid_ident(), valid_ident()).prop_map(|(module, name)| { + Declaration::Use(UseDecl { + path: vec![module, name], + kind: UseKind::Single, + span: Span::new(0, 10), + }) + }) +} + +fn valid_use_grouped() -> impl Strategy { + (valid_ident(), prop::collection::vec(valid_ident(), 1..5)).prop_map(|(module, names)| { + Declaration::Use(UseDecl { + path: vec![module], + kind: UseKind::Grouped(names), + span: Span::new(0, 10), + }) + }) +} + +fn valid_use_wildcard() -> impl Strategy { + 
valid_ident().prop_map(|module| { + Declaration::Use(UseDecl { + path: vec![module], + kind: UseKind::Wildcard, + span: Span::new(0, 10), + }) + }) +} + +// ===== Property Tests ===== + +proptest! { + #[test] + fn test_name_table_registers_all_declarations( + chars in prop::collection::vec(valid_character_decl(), 0..10) + ) { + let declarations: Vec<_> = chars.iter().map(|(_, decl)| decl.clone()).collect(); + let file = File { declarations }; + + let result = NameTable::from_file(&file); + + if chars.is_empty() { + // Empty file should succeed + assert!(result.is_ok()); + } else { + // Check if there are duplicates + let mut seen = std::collections::HashSet::new(); + let has_duplicates = chars.iter().any(|(name, _)| !seen.insert(name)); + + if has_duplicates { + // Should fail with duplicate error + assert!(result.is_err()); + } else { + // Should succeed and all names should be registered + let table = result.unwrap(); + for (name, _) in &chars { + assert!(table.lookup(std::slice::from_ref(name)).is_some(), + "Name '{}' should be registered", name); + } + } + } + } + + #[test] + fn test_duplicate_detection_always_fails( + name in valid_ident(), + count in 2usize..5 + ) { + let declarations: Vec<_> = (0..count).map(|i| { + Declaration::Character(Character { + name: name.clone(), + fields: vec![], + template: None, + + span: Span::new(i * 10, i * 10 + 10), + }) + }).collect(); + + let file = File { declarations }; + let result = NameTable::from_file(&file); + + // Should always fail with duplicate error + assert!(result.is_err()); + } + + #[test] + fn test_lookup_is_case_sensitive(name in valid_ident()) { + let file = File { + declarations: vec![ + Declaration::Character(Character { + name: name.clone(), + fields: vec![], + template: None, + + span: Span::new(0, 10), + }), + ], + }; + + let table = NameTable::from_file(&file).unwrap(); + + // Original name should be found + assert!(table.lookup(std::slice::from_ref(&name)).is_some()); + + // Different case should 
not be found + let uppercase = name.to_uppercase(); + if uppercase != name { + assert!(table.lookup(&[uppercase]).is_none()); + } + } + + #[test] + fn test_kind_filtering_works( + chars in prop::collection::vec(valid_character_decl(), 0..5), + templates in prop::collection::vec(valid_template_decl(), 0..5), + enums in prop::collection::vec(valid_enum_decl(), 0..5) + ) { + let mut declarations = vec![]; + declarations.extend(chars.iter().map(|(_, d)| d.clone())); + declarations.extend(templates.iter().map(|(_, d)| d.clone())); + declarations.extend(enums.iter().map(|(_, d)| d.clone())); + + let file = File { declarations }; + + // Only proceed if no duplicates + let mut seen = std::collections::HashSet::new(); + let has_duplicates = chars.iter().any(|(name, _)| !seen.insert(name)) + || templates.iter().any(|(name, _)| !seen.insert(name)) + || enums.iter().any(|(name, _)| !seen.insert(name)); + + if !has_duplicates { + let table = NameTable::from_file(&file).unwrap(); + + let char_count = table.entries_of_kind(DeclKind::Character).count(); + let template_count = table.entries_of_kind(DeclKind::Template).count(); + let enum_count = table.entries_of_kind(DeclKind::Enum).count(); + + assert_eq!(char_count, chars.len()); + assert_eq!(template_count, templates.len()); + assert_eq!(enum_count, enums.len()); + } + } + + #[test] + fn test_use_statements_are_collected( + uses in prop::collection::vec( + prop_oneof![ + valid_use_single(), + valid_use_grouped(), + valid_use_wildcard(), + ], + 0..10 + ) + ) { + let file = File { declarations: uses.clone() }; + let table = NameTable::from_file(&file).unwrap(); + + assert_eq!(table.imports().len(), uses.len()); + } + + #[test] + fn test_fuzzy_matching_finds_close_names( + name in valid_ident().prop_filter("long enough", |s| s.len() > 3) + ) { + let file = File { + declarations: vec![ + Declaration::Character(Character { + name: name.clone(), + fields: vec![], + template: None, + + span: Span::new(0, 10), + }), + ], + }; + + let 
table = NameTable::from_file(&file).unwrap(); + + // Create a typo by swapping two adjacent characters + if name.len() >= 2 { + let mut chars: Vec = name.chars().collect(); + chars.swap(0, 1); + let typo: String = chars.into_iter().collect(); + + // Should suggest the original name + if let Some(suggestion) = table.find_suggestion(&typo) { + assert_eq!(suggestion, name); + } + } + } + + #[test] + fn test_mixed_declarations_and_imports( + chars in prop::collection::vec(valid_character_decl(), 1..5), + uses in prop::collection::vec(valid_use_single(), 0..3) + ) { + let mut declarations = uses; + declarations.extend(chars.iter().map(|(_, d)| d.clone())); + + let file = File { declarations }; + + // Check for duplicates + let mut seen = std::collections::HashSet::new(); + let has_duplicates = chars.iter().any(|(name, _)| !seen.insert(name)); + + if !has_duplicates { + let table = NameTable::from_file(&file).unwrap(); + + // All characters should be registered + for (name, _) in &chars { + assert!(table.lookup(std::slice::from_ref(name)).is_some()); + } + } + } +} diff --git a/src/resolve/types.rs b/src/resolve/types.rs new file mode 100644 index 0000000..ed8fef5 --- /dev/null +++ b/src/resolve/types.rs @@ -0,0 +1,138 @@ +//! Resolved types - what consumers see after resolution +//! +//! These types are similar to AST types but represent fully resolved, +//! validated entities with all cross-references resolved. 
+ +use crate::syntax::ast::{ + Field, + Span, +}; + +/// A fully resolved file with all cross-references resolved +#[derive(Debug, Clone)] +pub struct ResolvedFile { + pub declarations: Vec, +} + +/// A resolved top-level declaration +#[derive(Debug, Clone)] +pub enum ResolvedDeclaration { + Character(ResolvedCharacter), + Template(ResolvedTemplate), + LifeArc(ResolvedLifeArc), + Schedule(ResolvedSchedule), + Behavior(ResolvedBehavior), + Institution(ResolvedInstitution), + Relationship(ResolvedRelationship), + Location(ResolvedLocation), + Species(ResolvedSpecies), + Enum(ResolvedEnum), +} + +/// A resolved character with all template overrides applied +#[derive(Debug, Clone)] +pub struct ResolvedCharacter { + pub name: String, + pub fields: Vec, + pub span: Span, + /// Qualified path (e.g., characters::Martha) + pub qualified_path: Vec, +} + +/// A resolved template definition +#[derive(Debug, Clone)] +pub struct ResolvedTemplate { + pub name: String, + pub fields: Vec, + pub span: Span, + pub qualified_path: Vec, +} + +/// A resolved life arc state machine +#[derive(Debug, Clone)] +pub struct ResolvedLifeArc { + pub name: String, + pub states: Vec, + pub span: Span, + pub qualified_path: Vec, +} + +#[derive(Debug, Clone)] +pub struct ResolvedArcState { + pub name: String, + pub transitions: Vec, + pub span: Span, +} + +#[derive(Debug, Clone)] +pub struct ResolvedTransition { + pub to: String, + /// Validated - target state exists + pub target_state_index: usize, + pub condition: crate::syntax::ast::Expr, + pub span: Span, +} + +/// A resolved schedule +#[derive(Debug, Clone)] +pub struct ResolvedSchedule { + pub name: String, + pub blocks: Vec, + pub span: Span, + pub qualified_path: Vec, +} + +/// A resolved behavior tree +#[derive(Debug, Clone)] +pub struct ResolvedBehavior { + pub name: String, + pub root: crate::syntax::ast::BehaviorNode, + pub span: Span, + pub qualified_path: Vec, +} + +/// A resolved institution +#[derive(Debug, Clone)] +pub struct 
ResolvedInstitution { + pub name: String, + pub fields: Vec, + pub span: Span, + pub qualified_path: Vec, +} + +/// A resolved relationship with both sides validated +#[derive(Debug, Clone)] +pub struct ResolvedRelationship { + pub name: String, + pub participants: Vec, + pub fields: Vec, + pub span: Span, + pub qualified_path: Vec, +} + +/// A resolved location +#[derive(Debug, Clone)] +pub struct ResolvedLocation { + pub name: String, + pub fields: Vec, + pub span: Span, + pub qualified_path: Vec, +} + +/// A resolved species +#[derive(Debug, Clone)] +pub struct ResolvedSpecies { + pub name: String, + pub fields: Vec, + pub span: Span, + pub qualified_path: Vec, +} + +/// A resolved enum +#[derive(Debug, Clone)] +pub struct ResolvedEnum { + pub name: String, + pub variants: Vec, + pub span: Span, + pub qualified_path: Vec, +} diff --git a/src/resolve/validate.rs b/src/resolve/validate.rs new file mode 100644 index 0000000..730a68c --- /dev/null +++ b/src/resolve/validate.rs @@ -0,0 +1,501 @@ +//! Semantic validation for Storybook entities +//! +//! This module validates semantic constraints that can't be checked during +//! parsing: +//! - Reserved keyword conflicts in field names +//! - Trait value ranges +//! - Schedule time overlaps +//! - Life arc transition validity +//! - Behavior tree action registry checks +//! - Relationship bond values (0.0 .. 
1.0) + +use std::collections::HashSet; + +use crate::{ + resolve::{ + ErrorCollector, + ResolveError, + Result, + }, + syntax::ast::*, +}; + +/// List of reserved keywords that cannot be used as field names +const RESERVED_KEYWORDS: &[&str] = &[ + // Top-level declaration keywords + "character", + "template", + "life_arc", + "schedule", + "behavior", + "institution", + "relationship", + "location", + "species", + "enum", + // Statement keywords + "use", + "state", + "on", + "as", + "remove", + "append", + "strict", + "include", + "from", + // Expression keywords + "self", + "other", + "forall", + "exists", + "in", + "where", + "and", + "or", + "not", + "is", + "true", + "false", +]; + +/// Validate that field names don't conflict with reserved keywords +pub fn validate_no_reserved_keywords(fields: &[Field], collector: &mut ErrorCollector) { + for field in fields { + if RESERVED_KEYWORDS.contains(&field.name.as_str()) { + collector.add(ResolveError::ValidationError { + message: format!( + "Field name '{}' is a reserved keyword and cannot be used", + field.name + ), + help: Some(format!( + "The name '{}' is reserved by the Storybook language. Try using a different name like '{}Type', '{}Value', or 'my{}'.", + field.name, + field.name, + field.name, + field.name.chars().next().unwrap_or('x').to_uppercase().collect::() + &field.name[1..] + )), + }); + } + } +} + +/// Validate trait values are within valid ranges +pub fn validate_trait_ranges(fields: &[Field], collector: &mut ErrorCollector) { + for field in fields { + match &field.value { + | Value::Float(f) => { + // Normalized trait values should be 0.0 .. 
1.0 + if (field.name.ends_with("_normalized") || + field.name == "bond" || + field.name == "trust" || + field.name == "love") && + !(0.0..=1.0).contains(f) + { + collector.add(ResolveError::TraitOutOfRange { + field: field.name.clone(), + value: f.to_string(), + min: 0.0, + max: 1.0, + }); + } + }, + | Value::Int(i) => { + // Age should be reasonable + if field.name == "age" && (*i < 0 || *i > 150) { + collector.add(ResolveError::TraitOutOfRange { + field: "age".to_string(), + value: i.to_string(), + min: 0.0, + max: 150.0, + }); + } + }, + | _ => {}, + } + } +} + +/// Validate relationship bond values are in [0.0, 1.0] +pub fn validate_relationship_bonds(relationships: &[Relationship], collector: &mut ErrorCollector) { + for rel in relationships { + for field in &rel.fields { + if field.name == "bond" { + if let Value::Float(f) = field.value { + if !(0.0..=1.0).contains(&f) { + collector.add(ResolveError::TraitOutOfRange { + field: "bond".to_string(), + value: f.to_string(), + min: 0.0, + max: 1.0, + }); + } + } + } + } + + // Validate self/other blocks if present + for participant in &rel.participants { + if let Some(ref self_fields) = participant.self_block { + validate_trait_ranges(self_fields, collector); + } + if let Some(ref other_fields) = participant.other_block { + validate_trait_ranges(other_fields, collector); + } + } + } +} + +/// Validate schedule blocks don't overlap in time +pub fn validate_schedule_overlaps(schedule: &Schedule, collector: &mut ErrorCollector) { + // Sort blocks by start time + let mut sorted_blocks: Vec<_> = schedule.blocks.iter().collect(); + sorted_blocks.sort_by_key(|b| (b.start.hour as u32) * 60 + (b.start.minute as u32)); + + // Check for overlaps + for i in 0..sorted_blocks.len() { + for j in (i + 1)..sorted_blocks.len() { + let block1 = sorted_blocks[i]; + let block2 = sorted_blocks[j]; + + let end1 = (block1.end.hour as u32) * 60 + (block1.end.minute as u32); + let start2 = (block2.start.hour as u32) * 60 + 
(block2.start.minute as u32); + + // Check if blocks overlap + if end1 > start2 { + collector.add(ResolveError::ScheduleOverlap { + block1: format!( + "{} ({}:{:02}-{}:{:02})", + block1.activity, + block1.start.hour, + block1.start.minute, + block1.end.hour, + block1.end.minute + ), + block2: format!( + "{} ({}:{:02}-{}:{:02})", + block2.activity, + block2.start.hour, + block2.start.minute, + block2.end.hour, + block2.end.minute + ), + end1: format!("{}:{:02}", block1.end.hour, block1.end.minute), + start2: format!("{}:{:02}", block2.start.hour, block2.start.minute), + }); + } + } + } +} + +/// Validate life arc state machine has valid transitions +pub fn validate_life_arc_transitions(life_arc: &LifeArc, collector: &mut ErrorCollector) { + // Collect all state names + let mut state_names = HashSet::new(); + for state in &life_arc.states { + state_names.insert(state.name.clone()); + } + + // Validate all transitions point to valid states + for state in &life_arc.states { + for transition in &state.transitions { + if !state_names.contains(&transition.to) { + let available_states = state_names + .iter() + .map(|s| format!("'{}'", s)) + .collect::>() + .join(", "); + + collector.add(ResolveError::UnknownLifeArcState { + life_arc: life_arc.name.clone(), + state: state.name.clone(), + target: transition.to.clone(), + available_states, + }); + } + } + } + + // Warn if states have no outgoing transitions (terminal states) + // This is not an error, just informational +} + +/// Validate behavior tree actions are known +/// +/// If action_registry is empty, skips validation (no schema provided). 
+pub fn validate_behavior_tree_actions( + tree: &Behavior, + action_registry: &HashSet, + collector: &mut ErrorCollector, +) { + // Skip validation if no action schema was provided + if action_registry.is_empty() { + return; + } + + validate_tree_node_actions(&tree.root, action_registry, &tree.name, collector) +} + +fn validate_tree_node_actions( + node: &BehaviorNode, + action_registry: &HashSet, + tree_name: &str, + collector: &mut ErrorCollector, +) { + match node { + | BehaviorNode::Sequence(children) | BehaviorNode::Selector(children) => { + for child in children { + validate_tree_node_actions(child, action_registry, tree_name, collector); + } + }, + | BehaviorNode::Action(name, _params) => { + if !action_registry.contains(name) { + collector.add(ResolveError::UnknownBehaviorAction { + tree: tree_name.to_string(), + action: name.clone(), + }); + } + }, + | BehaviorNode::Condition(_) => { + // Conditions are validated separately via expression validation + }, + | BehaviorNode::Decorator(_name, child) => { + validate_tree_node_actions(child, action_registry, tree_name, collector); + }, + | BehaviorNode::SubTree(_path) => { + // SubTree references validated separately + }, + } +} + +/// Validate an entire file +/// +/// Collects all validation errors and returns them together instead of failing +/// fast. 
+pub fn validate_file(file: &File, action_registry: &HashSet) -> Result<()> { + let mut collector = ErrorCollector::new(); + + for decl in &file.declarations { + match decl { + | Declaration::Character(c) => { + validate_trait_ranges(&c.fields, &mut collector); + }, + | Declaration::Relationship(r) => { + validate_relationship_bonds(std::slice::from_ref(r), &mut collector); + }, + | Declaration::Schedule(s) => { + validate_schedule_overlaps(s, &mut collector); + }, + | Declaration::LifeArc(la) => { + validate_life_arc_transitions(la, &mut collector); + }, + | Declaration::Behavior(bt) => { + validate_behavior_tree_actions(bt, action_registry, &mut collector); + }, + | _ => { + // Other declarations don't need validation yet + }, + } + } + + collector.into_result(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_valid_trait_ranges() { + let fields = vec![ + Field { + name: "bond".to_string(), + value: Value::Float(0.8), + span: Span::new(0, 10), + }, + Field { + name: "age".to_string(), + value: Value::Int(30), + span: Span::new(0, 10), + }, + ]; + + let mut collector = ErrorCollector::new(); + validate_trait_ranges(&fields, &mut collector); + assert!(!collector.has_errors()); + } + + #[test] + fn test_invalid_bond_value_too_high() { + let fields = vec![Field { + name: "bond".to_string(), + value: Value::Float(1.5), + span: Span::new(0, 10), + }]; + + let mut collector = ErrorCollector::new(); + validate_trait_ranges(&fields, &mut collector); + assert!(collector.has_errors()); + } + + #[test] + fn test_invalid_bond_value_negative() { + let fields = vec![Field { + name: "bond".to_string(), + value: Value::Float(-0.1), + span: Span::new(0, 10), + }]; + + let mut collector = ErrorCollector::new(); + validate_trait_ranges(&fields, &mut collector); + assert!(collector.has_errors()); + } + + #[test] + fn test_invalid_age_negative() { + let fields = vec![Field { + name: "age".to_string(), + value: Value::Int(-5), + span: Span::new(0, 10), + }]; + + 
let mut collector = ErrorCollector::new(); + validate_trait_ranges(&fields, &mut collector); + assert!(collector.has_errors()); + } + + #[test] + fn test_invalid_age_too_high() { + let fields = vec![Field { + name: "age".to_string(), + value: Value::Int(200), + span: Span::new(0, 10), + }]; + + let mut collector = ErrorCollector::new(); + validate_trait_ranges(&fields, &mut collector); + assert!(collector.has_errors()); + } + + #[test] + fn test_valid_relationship_bond() { + let relationship = Relationship { + name: "Test".to_string(), + participants: vec![], + fields: vec![Field { + name: "bond".to_string(), + value: Value::Float(0.9), + span: Span::new(0, 10), + }], + span: Span::new(0, 100), + }; + + let mut collector = ErrorCollector::new(); + validate_relationship_bonds(&[relationship], &mut collector); + assert!(!collector.has_errors()); + } + + #[test] + fn test_invalid_relationship_bond() { + let relationship = Relationship { + name: "Test".to_string(), + participants: vec![], + fields: vec![Field { + name: "bond".to_string(), + value: Value::Float(1.2), + span: Span::new(0, 10), + }], + span: Span::new(0, 100), + }; + + let mut collector = ErrorCollector::new(); + validate_relationship_bonds(&[relationship], &mut collector); + assert!(collector.has_errors()); + } + + #[test] + fn test_life_arc_valid_transitions() { + let life_arc = LifeArc { + name: "Test".to_string(), + states: vec![ + ArcState { + name: "start".to_string(), + transitions: vec![Transition { + to: "end".to_string(), + condition: Expr::Identifier(vec!["ready".to_string()]), + span: Span::new(0, 10), + }], + span: Span::new(0, 50), + }, + ArcState { + name: "end".to_string(), + transitions: vec![], + span: Span::new(50, 100), + }, + ], + span: Span::new(0, 100), + }; + + let mut collector = ErrorCollector::new(); + validate_life_arc_transitions(&life_arc, &mut collector); + assert!(!collector.has_errors()); + } + + #[test] + fn test_life_arc_invalid_transition() { + let life_arc = LifeArc { 
+ name: "Test".to_string(), + states: vec![ArcState { + name: "start".to_string(), + transitions: vec![Transition { + to: "nonexistent".to_string(), + condition: Expr::Identifier(vec!["ready".to_string()]), + span: Span::new(0, 10), + }], + span: Span::new(0, 50), + }], + span: Span::new(0, 100), + }; + + let mut collector = ErrorCollector::new(); + validate_life_arc_transitions(&life_arc, &mut collector); + assert!(collector.has_errors()); + } + + #[test] + fn test_behavior_tree_valid_actions() { + let mut registry = HashSet::new(); + registry.insert("walk".to_string()); + registry.insert("eat".to_string()); + + let tree = Behavior { + name: "Test".to_string(), + root: BehaviorNode::Sequence(vec![ + BehaviorNode::Action("walk".to_string(), vec![]), + BehaviorNode::Action("eat".to_string(), vec![]), + ]), + span: Span::new(0, 100), + }; + + let mut collector = ErrorCollector::new(); + validate_behavior_tree_actions(&tree, ®istry, &mut collector); + assert!(!collector.has_errors()); + } + + #[test] + fn test_behavior_tree_invalid_action() { + // Create a registry with some actions (but not "unknown_action") + let mut registry = HashSet::new(); + registry.insert("walk".to_string()); + registry.insert("work".to_string()); + + let tree = Behavior { + name: "Test".to_string(), + root: BehaviorNode::Action("unknown_action".to_string(), vec![]), + span: Span::new(0, 100), + }; + + let mut collector = ErrorCollector::new(); + validate_behavior_tree_actions(&tree, ®istry, &mut collector); + assert!(collector.has_errors()); + } +} diff --git a/src/resolve/validate_prop_tests.rs b/src/resolve/validate_prop_tests.rs new file mode 100644 index 0000000..da4a604 --- /dev/null +++ b/src/resolve/validate_prop_tests.rs @@ -0,0 +1,226 @@ +//! 
Property tests for semantic validation + +use std::collections::HashSet; + +use proptest::prelude::*; + +use crate::{ + resolve::{ + validate::*, + ErrorCollector, + }, + syntax::ast::*, +}; + +// ===== Generators ===== + +fn valid_bond_field() -> impl Strategy { + (0.0..=1.0).prop_map(|f| Field { + name: "bond".to_string(), + value: Value::Float(f), + span: Span::new(0, 10), + }) +} + +fn invalid_bond_field() -> impl Strategy { + prop_oneof![ + (-100.0..0.0).prop_map(|f| Field { + name: "bond".to_string(), + value: Value::Float(f), + span: Span::new(0, 10), + }), + (1.0..100.0).prop_map(|f| Field { + name: "bond".to_string(), + value: Value::Float(f), + span: Span::new(0, 10), + }), + ] +} + +fn valid_age_field() -> impl Strategy { + (0i64..=150).prop_map(|age| Field { + name: "age".to_string(), + value: Value::Int(age), + span: Span::new(0, 10), + }) +} + +fn invalid_age_field() -> impl Strategy { + prop_oneof![ + (-100i64..-1).prop_map(|age| Field { + name: "age".to_string(), + value: Value::Int(age), + span: Span::new(0, 10), + }), + (151i64..300).prop_map(|age| Field { + name: "age".to_string(), + value: Value::Int(age), + span: Span::new(0, 10), + }), + ] +} + +// ===== Property Tests ===== + +proptest! 
{ + #[test] + fn test_valid_bond_always_passes(field in valid_bond_field()) { + let fields = vec![field]; + let mut collector = ErrorCollector::new(); + validate_trait_ranges(&fields, &mut collector); + assert!(!collector.has_errors()); + } + + #[test] + fn test_invalid_bond_always_fails(field in invalid_bond_field()) { + let fields = vec![field]; + let mut collector = ErrorCollector::new(); + validate_trait_ranges(&fields, &mut collector); + assert!(collector.has_errors()); + } + + #[test] + fn test_valid_age_always_passes(field in valid_age_field()) { + let fields = vec![field]; + let mut collector = ErrorCollector::new(); + validate_trait_ranges(&fields, &mut collector); + assert!(!collector.has_errors()); + } + + #[test] + fn test_invalid_age_always_fails(field in invalid_age_field()) { + let fields = vec![field]; + let mut collector = ErrorCollector::new(); + validate_trait_ranges(&fields, &mut collector); + assert!(collector.has_errors()); + } + + #[test] + fn test_bond_exact_bounds(f in 0.0f64..=1.0) { + let field = Field { + name: "bond".to_string(), + value: Value::Float(f), + span: Span::new(0, 10), + }; + let mut collector = ErrorCollector::new(); + validate_trait_ranges(&[field], &mut collector); + assert!(!collector.has_errors()); + } + + #[test] + fn test_relationship_bond_in_range_passes( + bond_value in 0.0f64..=1.0 + ) { + let rel = Relationship { + name: "Test".to_string(), + participants: vec![], + fields: vec![Field { + name: "bond".to_string(), + value: Value::Float(bond_value), + span: Span::new(0, 10), + }], + span: Span::new(0, 100), + }; + + let mut collector = ErrorCollector::new(); + validate_relationship_bonds(&[rel], &mut collector); + assert!(!collector.has_errors()); + } + + #[test] + fn test_relationship_bond_out_of_range_fails( + bond_value in prop_oneof![(-100.0..0.0), (1.0..100.0)] + ) { + let rel = Relationship { + name: "Test".to_string(), + participants: vec![], + fields: vec![Field { + name: "bond".to_string(), + value: 
Value::Float(bond_value), + span: Span::new(0, 10), + }], + span: Span::new(0, 100), + }; + + let mut collector = ErrorCollector::new(); + validate_relationship_bonds(&[rel], &mut collector); + assert!(collector.has_errors()); + } + + #[test] + fn test_life_arc_with_valid_transitions_passes( + state1_name in "[a-z]{3,10}", + state2_name in "[a-z]{3,10}" + ) { + if state1_name == state2_name { + return Ok(()); + } + + let life_arc = LifeArc { + name: "Test".to_string(), + states: vec![ + ArcState { + name: state1_name.clone(), + transitions: vec![Transition { + to: state2_name.clone(), + condition: Expr::BoolLit(true), + span: Span::new(0, 10), + }], + span: Span::new(0, 50), + }, + ArcState { + name: state2_name, + transitions: vec![], + span: Span::new(50, 100), + }, + ], + span: Span::new(0, 100), + }; + + let mut collector = ErrorCollector::new(); + validate_life_arc_transitions(&life_arc, &mut collector); + assert!(!collector.has_errors()); + } + + #[test] + fn test_known_action_always_passes( + tree_name in "[a-z]{3,10}", + action_name in "[a-z]{3,10}" + ) { + let mut registry = HashSet::new(); + registry.insert(action_name.clone()); + + let tree = Behavior { + name: tree_name, + root: BehaviorNode::Action(action_name, vec![]), + span: Span::new(0, 100), + }; + + let mut collector = ErrorCollector::new(); + validate_behavior_tree_actions(&tree, ®istry, &mut collector); + assert!(!collector.has_errors()); + } + + #[test] + fn test_unknown_action_always_fails( + tree_name in "[a-z]{3,10}", + action_name in "[a-z]{3,10}" + ) { + // Create a registry with some actions, but ensure generated action isn't in it + // Using uppercase letters ensures it won't match the generated lowercase action + let mut registry = HashSet::new(); + registry.insert("WALK".to_string()); + registry.insert("WORK".to_string()); + registry.insert("EAT".to_string()); + + let tree = Behavior { + name: tree_name, + root: BehaviorNode::Action(action_name, vec![]), + span: Span::new(0, 100), + 
}; + + let mut collector = ErrorCollector::new(); + validate_behavior_tree_actions(&tree, ®istry, &mut collector); + assert!(collector.has_errors()); + } +} diff --git a/src/syntax/ast.rs b/src/syntax/ast.rs new file mode 100644 index 0000000..b446983 --- /dev/null +++ b/src/syntax/ast.rs @@ -0,0 +1,282 @@ +/// Source location for error reporting +#[derive(Debug, Clone, PartialEq)] +pub struct Span { + pub start: usize, + pub end: usize, +} + +impl Span { + pub fn new(start: usize, end: usize) -> Self { + Self { start, end } + } +} + +/// Top-level file containing multiple declarations +#[derive(Debug, Clone, PartialEq)] +pub struct File { + pub declarations: Vec, +} + +/// Any top-level declaration +#[derive(Debug, Clone, PartialEq)] +pub enum Declaration { + Use(UseDecl), + Character(Character), + Template(Template), + LifeArc(LifeArc), + Schedule(Schedule), + Behavior(Behavior), + Institution(Institution), + Relationship(Relationship), + Location(Location), + Species(Species), + Enum(EnumDecl), +} + +/// Use statement for importing definitions +#[derive(Debug, Clone, PartialEq)] +pub struct UseDecl { + pub path: Vec, + pub kind: UseKind, + pub span: Span, +} + +#[derive(Debug, Clone, PartialEq)] +pub enum UseKind { + Single, // use foo::bar + Grouped(Vec), // use foo::{bar, baz} + Wildcard, // use foo::* +} + +/// Character definition +#[derive(Debug, Clone, PartialEq)] +pub struct Character { + pub name: String, + pub fields: Vec, + pub template: Option>, // `from Template1, Template2` + pub span: Span, +} + +/// Template definition (like Character but allows range values) +#[derive(Debug, Clone, PartialEq)] +pub struct Template { + pub name: String, + pub fields: Vec, + pub strict: bool, + pub includes: Vec, + pub span: Span, +} + +/// Field in a structured definition +#[derive(Debug, Clone, PartialEq)] +pub struct Field { + pub name: String, + pub value: Value, + pub span: Span, +} + +/// Field value types +#[derive(Debug, Clone, PartialEq)] +pub enum Value { 
+ Int(i64), + Float(f64), + String(String), + Bool(bool), + Range(Box, Box), // For templates: 20..40 + Time(Time), + Duration(Duration), + Identifier(Vec), // Qualified path reference + List(Vec), + Object(Vec), + ProseBlock(ProseBlock), + Override(Override), +} + +/// Time literal (HH:MM or HH:MM:SS) +#[derive(Debug, Clone, PartialEq)] +pub struct Time { + pub hour: u8, + pub minute: u8, + pub second: u8, +} + +/// Duration literal (e.g., 2h30m) +#[derive(Debug, Clone, PartialEq)] +pub struct Duration { + pub hours: u32, + pub minutes: u32, + pub seconds: u32, +} + +/// Prose block with tag +#[derive(Debug, Clone, PartialEq)] +pub struct ProseBlock { + pub tag: String, + pub content: String, + pub span: Span, +} + +/// Override specification for template instantiation +#[derive(Debug, Clone, PartialEq)] +pub struct Override { + pub base: Vec, // Template path + pub overrides: Vec, + pub span: Span, +} + +#[derive(Debug, Clone, PartialEq)] +pub enum OverrideOp { + Set(Field), // field: value + Remove(String), // remove field + Append(Field), // append field +} + +/// Life arc state machine +#[derive(Debug, Clone, PartialEq)] +pub struct LifeArc { + pub name: String, + pub states: Vec, + pub span: Span, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct ArcState { + pub name: String, + pub transitions: Vec, + pub span: Span, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct Transition { + pub to: String, + pub condition: Expr, + pub span: Span, +} + +/// Schedule definition +#[derive(Debug, Clone, PartialEq)] +pub struct Schedule { + pub name: String, + pub blocks: Vec, + pub span: Span, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct ScheduleBlock { + pub start: Time, + pub end: Time, + pub activity: String, + pub span: Span, +} + +/// Behavior tree definition +#[derive(Debug, Clone, PartialEq)] +pub struct Behavior { + pub name: String, + pub root: BehaviorNode, + pub span: Span, +} + +#[derive(Debug, Clone, PartialEq)] +pub enum BehaviorNode { + 
Selector(Vec), // ? operator + Sequence(Vec), // > operator (context-dependent) + Condition(Expr), + Action(String, Vec), // Action name + parameters + Decorator(String, Box), + SubTree(Vec), // Reference to another behavior +} + +/// Institution definition +#[derive(Debug, Clone, PartialEq)] +pub struct Institution { + pub name: String, + pub fields: Vec, + pub span: Span, +} + +/// Relationship definition +#[derive(Debug, Clone, PartialEq)] +pub struct Relationship { + pub name: String, + pub participants: Vec, + pub fields: Vec, + pub span: Span, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct Participant { + pub role: Option, // "as parent" + pub name: Vec, // Qualified path + pub self_block: Option>, + pub other_block: Option>, + pub span: Span, +} + +/// Location definition +#[derive(Debug, Clone, PartialEq)] +pub struct Location { + pub name: String, + pub fields: Vec, + pub span: Span, +} + +/// Species definition +#[derive(Debug, Clone, PartialEq)] +pub struct Species { + pub name: String, + pub fields: Vec, + pub span: Span, +} + +/// Enum definition +#[derive(Debug, Clone, PartialEq)] +pub struct EnumDecl { + pub name: String, + pub variants: Vec, + pub span: Span, +} + +/// Expression AST for conditions and queries +#[derive(Debug, Clone, PartialEq)] +pub enum Expr { + IntLit(i64), + FloatLit(f64), + StringLit(String), + BoolLit(bool), + Identifier(Vec), + FieldAccess(Box, String), + Comparison(Box, CompOp, Box), + Logical(Box, LogicalOp, Box), + Unary(UnaryOp, Box), + Quantifier(QuantifierKind, String, Box, Box), /* forall/exists x in collection: + * predicate */ +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum CompOp { + Eq, // == + Ne, // != + Lt, // < + Le, // <= + Gt, // > + Ge, // >= +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum LogicalOp { + And, + Or, +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum UnaryOp { + Not, + Neg, +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum QuantifierKind { + ForAll, + 
Exists, +} diff --git a/src/syntax/lexer.rs b/src/syntax/lexer.rs new file mode 100644 index 0000000..6ca144a --- /dev/null +++ b/src/syntax/lexer.rs @@ -0,0 +1,424 @@ +use std::fmt; + +use logos::Logos; + +/// Token types for the Storybook language +#[derive(Logos, Debug, Clone, PartialEq)] +#[logos(skip r"[ \t\n\f]+")] // Skip whitespace +#[logos(skip r"//[^\n]*")] // Skip line comments +#[logos(skip r"/\*([^*]|\*[^/])*\*/")] // Skip block comments +pub enum Token { + // Keywords + #[token("use")] + Use, + #[token("character")] + Character, + #[token("template")] + Template, + #[token("life_arc")] + LifeArc, + #[token("schedule")] + Schedule, + #[token("behavior")] + Behavior, + #[token("institution")] + Institution, + #[token("relationship")] + Relationship, + #[token("location")] + Location, + #[token("species")] + Species, + #[token("enum")] + Enum, + #[token("state")] + State, + #[token("on")] + On, + #[token("as")] + As, + #[token("self")] + SelfKw, + #[token("other")] + Other, + #[token("remove")] + Remove, + #[token("append")] + Append, + #[token("forall")] + ForAll, + #[token("exists")] + Exists, + #[token("in")] + In, + #[token("where")] + Where, + #[token("and")] + And, + #[token("or")] + Or, + #[token("not")] + Not, + #[token("strict")] + Strict, + #[token("include")] + Include, + #[token("from")] + From, + #[token("is")] + Is, + #[token("true")] + True, + #[token("false")] + False, + + // Identifiers and literals + #[regex(r"[a-zA-Z_][a-zA-Z0-9_]*", |lex| lex.slice().to_string())] + Ident(String), + + #[regex(r"-?[0-9]+", |lex| lex.slice().parse::().ok())] + IntLit(i64), + + #[regex(r"-?[0-9]+\.[0-9]+", |lex| lex.slice().parse::().ok())] + FloatLit(f64), + + #[regex(r#""([^"\\]|\\.)*""#, |lex| { + let s = lex.slice(); + s[1..s.len()-1].to_string() + })] + StringLit(String), + + // Time literal: HH:MM or HH:MM:SS + #[regex(r"[0-9]{2}:[0-9]{2}(:[0-9]{2})?", |lex| lex.slice().to_string())] + TimeLit(String), + + // Duration literal: e.g., 2h30m, 45m, 1h 
+ #[regex(r"[0-9]+[hms]([0-9]+[hms])*", |lex| lex.slice().to_string())] + DurationLit(String), + + // Punctuation + #[token("{")] + LBrace, + #[token("}")] + RBrace, + #[token("(")] + LParen, + #[token(")")] + RParen, + #[token("[")] + LBracket, + #[token("]")] + RBracket, + #[token(":")] + Colon, + #[token("::")] + ColonColon, + #[token(";")] + Semicolon, + #[token(",")] + Comma, + #[token(".")] + Dot, + #[token("..")] + DotDot, + #[token("*")] + Star, + #[token("?")] + Question, + #[token("@")] + At, + + // Operators + #[token(">")] + Gt, + #[token(">=")] + Ge, + #[token("<")] + Lt, + #[token("<=")] + Le, + #[token("->")] + Arrow, + + // Special markers + #[token("---")] + ProseMarker, + + // Prose block (handled specially) + ProseBlock(super::ast::ProseBlock), + + // Error token + Error, +} + +impl fmt::Display for Token { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + | Token::Ident(s) => write!(f, "identifier '{}'", s), + | Token::IntLit(n) => write!(f, "integer {}", n), + | Token::FloatLit(n) => write!(f, "float {}", n), + | Token::StringLit(s) => write!(f, "string \"{}\"", s), + | Token::TimeLit(s) => write!(f, "time {}", s), + | Token::DurationLit(s) => write!(f, "duration {}", s), + | Token::ProseBlock(pb) => write!(f, "prose block ---{}", pb.tag), + | _ => write!(f, "{:?}", self), + } + } +} + +/// Lexer state machine for handling prose blocks +#[derive(Debug, Clone)] +enum LexerState { + Normal, + ProseTag, // After seeing first --- + ProseContent(String, usize), // Tag + content start position +} + +/// Wrapper lexer that handles two-mode scanning +pub struct Lexer<'a> { + source: &'a str, + position: usize, + state: LexerState, + normal_lexer: Option>, +} + +impl<'a> Lexer<'a> { + pub fn new(source: &'a str) -> Self { + Self { + source, + position: 0, + state: LexerState::Normal, + normal_lexer: Some(Token::lexer(source)), + } + } + + fn scan_prose_tag(&mut self) -> Option<(usize, Token, usize)> { + let _start = 
self.position; + self.position += 3; // Skip --- + + // Skip whitespace + while self.position < self.source.len() && + self.source[self.position..].starts_with(|c: char| c.is_whitespace()) + { + self.position += 1; + } + + // Read tag until whitespace or newline + let tag_start = self.position; + while self.position < self.source.len() { + let ch = self.source[self.position..].chars().next().unwrap(); + if ch.is_whitespace() { + break; + } + self.position += ch.len_utf8(); + } + + let tag = self.source[tag_start..self.position].to_string(); + + // Skip to end of line + while self.position < self.source.len() { + let ch = self.source[self.position..].chars().next().unwrap(); + if ch == '\n' { + self.position += 1; + break; + } + self.position += ch.len_utf8(); + } + + self.state = LexerState::ProseContent(tag, self.position); + self.next() + } + + fn scan_prose_content( + &mut self, + tag: String, + content_start: usize, + ) -> Option<(usize, Token, usize)> { + let remaining = &self.source[content_start..]; + let mut byte_offset = 0; + + // Scan until we find closing --- + while byte_offset < remaining.len() { + if remaining[byte_offset..].starts_with("---") { + // Check if it's at start of line (or after whitespace) + let is_line_start = byte_offset == 0 || + remaining[..byte_offset] + .chars() + .rev() + .take_while(|&c| c != '\n') + .all(|c| c.is_whitespace()); + + if is_line_start { + // Found closing marker + let content_end = content_start + byte_offset; + let content = self.source[content_start..content_end] + .trim_end() + .to_string(); + let start = content_start.saturating_sub(tag.len() + 4); // Include opening ---tag + self.position = content_end + 3; // Skip closing --- + self.state = LexerState::Normal; + self.normal_lexer = Some(Token::lexer(&self.source[self.position..])); + + let prose_block = super::ast::ProseBlock { + tag, + content, + span: super::ast::Span::new(start, self.position), + }; + return Some((start, Token::ProseBlock(prose_block), 
self.position)); + } + } + + // Advance by one UTF-8 character to avoid char boundary issues + if let Some(ch) = remaining[byte_offset..].chars().next() { + byte_offset += ch.len_utf8(); + } else { + break; + } + } + + // EOF reached without closing marker - treat as error + None + } +} + +impl<'a> Iterator for Lexer<'a> { + type Item = (usize, Token, usize); + + fn next(&mut self) -> Option { + match &self.state { + | LexerState::Normal => { + let lexer = self.normal_lexer.as_mut()?; + + let token = lexer.next()?; + let span = lexer.span(); + + match token { + | Ok(Token::ProseMarker) => { + // Switch to prose mode + let marker_pos = span.start; + self.position = marker_pos; + self.state = LexerState::ProseTag; + self.normal_lexer = None; + self.scan_prose_tag() + }, + | Ok(tok) => { + self.position = span.end; + Some((span.start, tok, span.end)) + }, + | Err(_) => { + self.position = span.end; + Some((span.start, Token::Error, span.end)) + }, + } + }, + | LexerState::ProseTag => { + // Should not happen - scan_prose_tag transitions state + None + }, + | LexerState::ProseContent(tag, content_start) => { + let tag = tag.clone(); + let content_start = *content_start; + self.scan_prose_content(tag, content_start) + }, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_basic_tokens() { + let input = "character Martha { age: 34 }"; + let lexer = Lexer::new(input); + let tokens: Vec = lexer.map(|(_, tok, _)| tok).collect(); + + assert_eq!( + tokens, + vec![ + Token::Character, + Token::Ident("Martha".to_string()), + Token::LBrace, + Token::Ident("age".to_string()), + Token::Colon, + Token::IntLit(34), + Token::RBrace, + ] + ); + } + + #[test] + fn test_prose_block() { + let input = r#" +---backstory +Martha grew up in a small town. +She loved baking from a young age. 
+--- +"#; + let lexer = Lexer::new(input.trim()); + let tokens: Vec = lexer.map(|(_, tok, _)| tok).collect(); + + assert_eq!(tokens.len(), 1); + match &tokens[0] { + | Token::ProseBlock(pb) => { + assert_eq!(pb.tag, "backstory"); + assert!(pb.content.contains("Martha grew up")); + assert!(pb.content.contains("young age")); + }, + | _ => panic!("Expected ProseBlock, got {:?}", tokens[0]), + } + } + + #[test] + fn test_prose_with_dashes_in_content() { + let input = r#" +---description +She was well-known for her kind-hearted nature. +The bakery had a no-nonsense policy. +--- +"#; + let lexer = Lexer::new(input.trim()); + let tokens: Vec = lexer.map(|(_, tok, _)| tok).collect(); + + assert_eq!(tokens.len(), 1); + match &tokens[0] { + | Token::ProseBlock(pb) => { + assert_eq!(pb.tag, "description"); + assert!(pb.content.contains("well-known")); + assert!(pb.content.contains("kind-hearted")); + assert!(pb.content.contains("no-nonsense")); + }, + | _ => panic!("Expected ProseBlock"), + } + } + + #[test] + fn test_time_duration_literals() { + let input = "08:30 14:45:00 2h30m 45m"; + let lexer = Lexer::new(input); + let tokens: Vec = lexer.map(|(_, tok, _)| tok).collect(); + + assert_eq!( + tokens, + vec![ + Token::TimeLit("08:30".to_string()), + Token::TimeLit("14:45:00".to_string()), + Token::DurationLit("2h30m".to_string()), + Token::DurationLit("45m".to_string()), + ] + ); + } + + #[test] + fn test_range_syntax() { + let input = "20..40"; + let lexer = Lexer::new(input); + let tokens: Vec = lexer.map(|(_, tok, _)| tok).collect(); + + assert_eq!( + tokens, + vec![Token::IntLit(20), Token::DotDot, Token::IntLit(40),] + ); + } +} diff --git a/src/syntax/mod.rs b/src/syntax/mod.rs new file mode 100644 index 0000000..0464cf0 --- /dev/null +++ b/src/syntax/mod.rs @@ -0,0 +1,56 @@ +#![allow(unused_assignments)] // False positives in error enum fields used by thiserror + +pub mod ast; +pub mod lexer; + +// Parser is generated by LALRPOP +#[allow(clippy::all)] 
+#[allow(unused)] +mod parser; + +pub use parser::FileParser; + +#[cfg(test)] +mod prop_tests; + +use miette::Diagnostic; +use thiserror::Error; + +#[derive(Error, Debug, Diagnostic)] +pub enum ParseError { + #[error("Unexpected token: {token}")] + #[diagnostic(help("Check for syntax errors like missing braces, colons, or semicolons. Common issues: forgetting ':' after field names, missing '}}' to close a block, or using reserved keywords as names."))] + UnexpectedToken { + #[allow(dead_code)] + #[allow(unused_assignments)] + token: String, + #[label("unexpected token here")] + span: miette::SourceSpan, + }, + + #[error("Unexpected end of file")] + #[diagnostic(help("The file ended before a declaration was complete. Check that all blocks are properly closed with '}}', all strings are closed with quotes, and all prose blocks end with '---'."))] + UnexpectedEof { + #[label("file ended here, but expected more input")] + span: miette::SourceSpan, + }, + + #[error("Invalid token")] + #[diagnostic(help("This character or sequence is not valid in Storybook syntax. Common issues: special characters in names (use letters, numbers, and underscores only), unescaped quotes in strings, or invalid time formats."))] + InvalidToken { + #[label("invalid token here")] + span: miette::SourceSpan, + }, + + #[error("Unclosed prose block starting with ---{tag}")] + #[diagnostic(help("Prose blocks must be closed with '---' on its own line. 
Make sure the closing '---' is at the start of a line with no other text before it."))] + UnclosedProseBlock { + #[allow(dead_code)] + #[allow(unused_assignments)] + tag: String, + #[label("prose block starts here but never closes")] + span: miette::SourceSpan, + }, +} + +pub type ParseResult = Result; diff --git a/src/syntax/parser.lalrpop b/src/syntax/parser.lalrpop new file mode 100644 index 0000000..d5045c3 --- /dev/null +++ b/src/syntax/parser.lalrpop @@ -0,0 +1,520 @@ +use crate::syntax::ast::*; +use crate::syntax::lexer::Token; + +grammar; + +// ===== Top-level ===== + +pub File: File = { + => File { declarations } +}; + +Declaration: Declaration = { + => Declaration::Use(u), + => Declaration::Character(c), + => Declaration::Template(t), + => Declaration::LifeArc(l), + => Declaration::Schedule(s), + => Declaration::Behavior(b), + => Declaration::Institution(i), + => Declaration::Relationship(r), + => Declaration::Location(loc), + => Declaration::Species(sp), + => Declaration::Enum(e), +}; + +// ===== Use declarations ===== + +UseDecl: UseDecl = { + "use" ";" => UseDecl { + path, + kind: UseKind::Single, + span: Span::new(0, 0), // TODO: track actual spans + }, + "use" "::" "{" > "}" ";" => UseDecl { + path: base, + kind: UseKind::Grouped(items), + span: Span::new(0, 0), + }, + "use" "::" "*" ";" => UseDecl { + path, + kind: UseKind::Wildcard, + span: Span::new(0, 0), + }, +}; + +Path: Vec = { + +}; + +PathSegments: Vec = { + => vec![<>], + "::" => { + v.push(i); + v + } +}; + +// ===== Character ===== + +Character: Character = { + "character" "{" "}" => Character { + name, + fields, + template, + span: Span::new(0, 0), + } +}; + +TemplateClause: Vec = { + "from" )*> => { + let mut templates = vec![t]; + templates.extend(rest); + templates + } +}; + +// ===== Template ===== + +Template: Template = { + "template" "{" "}" => Template { + name, + fields, + strict: strict.is_some(), + includes, + span: Span::new(0, 0), + } +}; + +Include: String = { + "include" 
=> name +}; + +// ===== Fields ===== + +Field: Field = { + ":" => Field { + name, + value, + span: Span::new(0, 0), + } +}; + +Value: Value = { + => Value::Int(<>), + => Value::Float(<>), + => Value::String(<>), + => Value::Bool(<>), + ".." => Value::Range( + Box::new(Value::Int(lo)), + Box::new(Value::Int(hi)) + ), + ".." => Value::Range( + Box::new(Value::Float(lo)), + Box::new(Value::Float(hi)) + ), + => Value::Time(t), + => Value::Duration(d), + => Value::Identifier(p), + => Value::ProseBlock(<>), + "[" > "]" => Value::List(values), + "{" "}" => Value::Object(fields), + => Value::Override(<>), +}; + +BoolLit: bool = { + "true" => true, + "false" => false, +}; + +Time: Time = { + => { + let parts: Vec<&str> = s.split(':').collect(); + let hour = parts[0].parse().unwrap_or(0); + let minute = parts[1].parse().unwrap_or(0); + let second = if parts.len() > 2 { + parts[2].parse().unwrap_or(0) + } else { + 0 + }; + Time { hour, minute, second } + } +}; + +Duration: Duration = { + => { + let mut hours = 0; + let mut minutes = 0; + let mut seconds = 0; + + let mut num = String::new(); + for ch in s.chars() { + if ch.is_ascii_digit() { + num.push(ch); + } else { + let val: u32 = num.parse().unwrap_or(0); + match ch { + 'h' => hours = val, + 'm' => minutes = val, + 's' => seconds = val, + _ => {} + } + num.clear(); + } + } + + Duration { hours, minutes, seconds } + } +}; + +ProseBlock: ProseBlock = { + ProseBlockToken +}; + +Override: Override = { + "@" "{" "}" => Override { + base, + overrides, + span: Span::new(0, 0), + } +}; + +OverrideOp: OverrideOp = { + "remove" => OverrideOp::Remove(name), + "append" => OverrideOp::Append(f), + => OverrideOp::Set(f), +}; + +// ===== Life Arc ===== + +LifeArc: LifeArc = { + "life_arc" "{" "}" => LifeArc { + name, + states, + span: Span::new(0, 0), + } +}; + +ArcState: ArcState = { + "state" "{" "}" => ArcState { + name, + transitions, + span: Span::new(0, 0), + } +}; + +Transition: Transition = { + "on" "->" => Transition { + to, + 
condition: cond, + span: Span::new(0, 0), + } +}; + +// ===== Schedule ===== + +Schedule: Schedule = { + "schedule" "{" "}" => Schedule { + name, + blocks, + span: Span::new(0, 0), + } +}; + +ScheduleBlock: ScheduleBlock = { + "->" ":" => ScheduleBlock { + start, + end, + activity, + span: Span::new(0, 0), + } +}; + +// ===== Behavior Trees ===== + +Behavior: Behavior = { + "behavior" "{" "}" => Behavior { + name, + root, + span: Span::new(0, 0), + } +}; + +BehaviorNode: BehaviorNode = { + , + , + , + , +}; + +SelectorNode: BehaviorNode = { + "?" "{" "}" => BehaviorNode::Selector(nodes), +}; + +SequenceNode: BehaviorNode = { + ">" "{" "}" => BehaviorNode::Sequence(nodes), +}; + +ActionNode: BehaviorNode = { + "(" > ")" => BehaviorNode::Action(name, params), + => BehaviorNode::Action(name, vec![]), +}; + +SubTreeNode: BehaviorNode = { + "@" => BehaviorNode::SubTree(path), +}; + +// ===== Institution ===== + +Institution: Institution = { + "institution" "{" "}" => Institution { + name, + fields, + span: Span::new(0, 0), + } +}; + +// ===== Relationship ===== + +Relationship: Relationship = { + "relationship" "{" "}" => Relationship { + name, + participants, + fields, + span: Span::new(0, 0), + } +}; + +Participant: Participant = { + )?> => Participant { + role, + name, + self_block, + other_block, + span: Span::new(0, 0), + } +}; + +SelfBlock: Vec = { + "self" "{" "}" => fields +}; + +OtherBlock: Vec = { + "other" "{" "}" => fields +}; + +// ===== Location ===== + +Location: Location = { + "location" "{" "}" => Location { + name, + fields, + span: Span::new(0, 0), + } +}; + +// ===== Species ===== + +Species: Species = { + "species" "{" "}" => Species { + name, + fields, + span: Span::new(0, 0), + } +}; + +// ===== Enum ===== + +EnumDecl: EnumDecl = { + "enum" "{" > "}" => EnumDecl { + name, + variants, + span: Span::new(0, 0), + } +}; + +// ===== Expressions ===== +// Expression grammar with proper precedence: +// or > and > not > field_access > comparison > term + 
+Expr: Expr = { + , +}; + +// Logical OR (lowest precedence) +OrExpr: Expr = { + "or" => { + Expr::Logical( + Box::new(left), + LogicalOp::Or, + Box::new(right) + ) + }, + , +}; + +// Logical AND +AndExpr: Expr = { + "and" => { + Expr::Logical( + Box::new(left), + LogicalOp::And, + Box::new(right) + ) + }, + , +}; + +// Unary NOT +NotExpr: Expr = { + "not" => { + Expr::Unary( + UnaryOp::Not, + Box::new(expr) + ) + }, + , +}; + +// Comparison expressions +ComparisonExpr: Expr = { + // Equality: field access or path is (literal or identifier) + "is" => { + Expr::Comparison( + Box::new(left), + CompOp::Eq, + Box::new(right) + ) + }, + // Comparison: field access or path > literal/identifier, etc. + => { + Expr::Comparison( + Box::new(left), + op, + Box::new(right) + ) + }, + // Just a field access expression + , +}; + +// Field access with dot notation (binds tightest) +FieldAccessExpr: Expr = { + "." => { + Expr::FieldAccess( + Box::new(base), + field + ) + }, + , +}; + +// Primary expressions (atoms) +PrimaryExpr: Expr = { + "self" => Expr::Identifier(vec!["self".to_string()]), + "other" => Expr::Identifier(vec!["other".to_string()]), + , + => Expr::Identifier(<>), +}; + +InequalityOp: CompOp = { + ">" => CompOp::Gt, + ">=" => CompOp::Ge, + "<" => CompOp::Lt, + "<=" => CompOp::Le, +}; + +Literal: Expr = { + => Expr::IntLit(<>), + => Expr::FloatLit(<>), + => Expr::StringLit(<>), + => Expr::BoolLit(<>), +}; + +// ===== Helpers ===== + +Comma: Vec = { + ",")*> => match e { + None => v, + Some(e) => { + let mut v = v; + v.push(e); + v + } + } +}; + +// ===== Token conversion ===== + +extern { + type Location = usize; + type Error = crate::syntax::ParseError; + + enum Token { + // Keywords + "use" => Token::Use, + "character" => Token::Character, + "template" => Token::Template, + "life_arc" => Token::LifeArc, + "schedule" => Token::Schedule, + "behavior" => Token::Behavior, + "institution" => Token::Institution, + "relationship" => Token::Relationship, + "location" => 
Token::Location, + "species" => Token::Species, + "enum" => Token::Enum, + "state" => Token::State, + "on" => Token::On, + "as" => Token::As, + "self" => Token::SelfKw, + "other" => Token::Other, + "remove" => Token::Remove, + "append" => Token::Append, + "forall" => Token::ForAll, + "exists" => Token::Exists, + "in" => Token::In, + "where" => Token::Where, + "and" => Token::And, + "or" => Token::Or, + "not" => Token::Not, + "strict" => Token::Strict, + "include" => Token::Include, + "from" => Token::From, + "is" => Token::Is, + "true" => Token::True, + "false" => Token::False, + + // Literals + Ident => Token::Ident(), + IntLit => Token::IntLit(), + FloatLit => Token::FloatLit(), + StringLit => Token::StringLit(), + TimeLit => Token::TimeLit(), + DurationLit => Token::DurationLit(), + ProseBlockToken => Token::ProseBlock(), + + // Punctuation + "{" => Token::LBrace, + "}" => Token::RBrace, + "(" => Token::LParen, + ")" => Token::RParen, + "[" => Token::LBracket, + "]" => Token::RBracket, + ":" => Token::Colon, + "::" => Token::ColonColon, + ";" => Token::Semicolon, + "," => Token::Comma, + "." => Token::Dot, + ".." => Token::DotDot, + "*" => Token::Star, + "?" 
=> Token::Question, + "@" => Token::At, + + // Operators + ">" => Token::Gt, + ">=" => Token::Ge, + "<" => Token::Lt, + "<=" => Token::Le, + "->" => Token::Arrow, + } +} diff --git a/src/syntax/parser.rs b/src/syntax/parser.rs new file mode 100644 index 0000000..bfd5496 --- /dev/null +++ b/src/syntax/parser.rs @@ -0,0 +1,10846 @@ +// auto-generated: "lalrpop 0.21.0" +// sha3: b3f8b7d69a1a61cf3e0d8ae0f0f9ec13e8688b76ec2b8c6502a3cc27e1279444 +use crate::syntax::{ + ast::*, + lexer::Token, +}; +#[allow(unused_extern_crates)] +extern crate lalrpop_util as __lalrpop_util; +#[allow(unused_imports)] +use self::__lalrpop_util::state_machine as __state_machine; +#[allow(unused_extern_crates)] +extern crate alloc; + +#[rustfmt::skip] +#[allow(explicit_outlives_requirements, non_snake_case, non_camel_case_types, unused_mut, unused_variables, unused_imports, unused_parens, clippy::needless_lifetimes, clippy::type_complexity, clippy::needless_return, clippy::too_many_arguments, clippy::never_loop, clippy::match_single_binding, clippy::needless_raw_string_hashes)] +mod __parse__File { + + use crate::syntax::ast::*; + use crate::syntax::lexer::Token; + #[allow(unused_extern_crates)] + extern crate lalrpop_util as __lalrpop_util; + #[allow(unused_imports)] + use self::__lalrpop_util::state_machine as __state_machine; + #[allow(unused_extern_crates)] + extern crate alloc; + use super::__ToTriple; + #[allow(dead_code)] + pub(crate) enum __Symbol<> + { + Variant0(Token), + Variant1(String), + Variant2(i64), + Variant3(f64), + Variant4(ProseBlock), + Variant5(Option), + Variant6(alloc::vec::Vec), + Variant7(Option), + Variant8(Field), + Variant9(alloc::vec::Vec), + Variant10(Value), + Variant11(alloc::vec::Vec), + Variant12(BehaviorNode), + Variant13(Expr), + Variant14(ArcState), + Variant15(alloc::vec::Vec), + Variant16(Behavior), + Variant17(alloc::vec::Vec), + Variant18(bool), + Variant19(Character), + Variant20(Vec), + Variant21(Vec), + Variant22(Vec), + Variant23(Declaration), + 
Variant24(alloc::vec::Vec), + Variant25(Duration), + Variant26(EnumDecl), + Variant27(Option), + Variant28(File), + Variant29(CompOp), + Variant30(Institution), + Variant31(LifeArc), + Variant32(Location), + Variant33(Option>), + Variant34(Override), + Variant35(OverrideOp), + Variant36(alloc::vec::Vec), + Variant37(Participant), + Variant38(alloc::vec::Vec), + Variant39(Relationship), + Variant40(Schedule), + Variant41(ScheduleBlock), + Variant42(alloc::vec::Vec), + Variant43(Species), + Variant44(Template), + Variant45(Option>), + Variant46(Time), + Variant47(Transition), + Variant48(alloc::vec::Vec), + Variant49(UseDecl), + Variant50(Option), + } + const __ACTION: &[i16] = &[ + // State 0 + 3, 82, 90, 85, 88, 81, 84, 87, 86, 89, 83, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 1 + 3, 82, 90, 85, 88, 81, 84, 87, 86, 89, 83, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 2 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 3 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 105, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 4 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 116, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 115, 15, 114, 0, 0, 0, 0, + // State 5 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 118, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 6 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 122, 0, 0, 0, 0, 0, 0, 0, -54, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 7 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 123, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 8 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 126, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 9 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 127, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 10 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 11 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 133, 0, 0, 0, 132, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 12 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 134, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 13 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 136, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 137, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 14 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 15 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 142, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 16 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -175, 0, 0, 0, 0, 0, 0, 0, 0, 144, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 
17 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 146, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 18 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 150, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 19 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 152, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 20 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 154, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 21 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 157, 0, 0, 0, 0, 0, 0, 0, 156, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 22 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 159, 161, 160, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -138, 0, 0, 0, 0, 0, 0, 0, -138, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 23 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 133, 0, 0, 0, 164, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 24 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 165, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 25 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 136, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 166, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 26 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 167, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 27 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 136, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 169, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 28 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 122, 0, 0, 0, 0, 0, 0, 0, -54, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 29 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 116, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 115, 15, 114, 0, 0, 0, 0, + // State 30 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 116, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 115, 15, 114, 0, 0, 0, 0, + // State 31 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 0, 0, -50, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 32 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 176, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 33 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 187, 186, 103, 190, 189, 192, 133, 188, 191, 46, 0, 0, 0, 45, 0, 0, 0, 0, 0, 0, 0, 0, 0, 44, 0, 0, 0, 0, 0, + // State 34 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 194, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 35 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 160, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -137, 0, 0, 0, 0, 0, 0, 0, -137, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 36 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 133, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 37 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 197, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 38 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 136, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 198, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 39 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 199, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 40 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 116, 0, 0, 0, 0, 0, 0, 0, 202, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 115, 15, 114, 0, 0, 0, 0, + // State 41 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 116, 0, 0, 0, 0, 0, 0, 0, 203, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 115, 15, 114, 0, 0, 0, 0, + // State 42 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 0, 0, -52, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 43 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 44 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 187, 186, 103, 190, 189, 192, 133, 188, 191, 46, 0, 0, 0, 45, -58, 0, 0, 0, 0, 0, 0, 0, 0, 44, 0, 0, 0, 0, 0, + // State 45 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 211, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 46 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 215, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 47 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 161, 160, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -134, 0, 0, 
0, 0, 0, 0, 0, -134, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 48 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 217, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 49 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 218, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 50 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 220, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 51 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 187, 186, 103, 190, 189, 192, 133, 188, 191, 46, 0, 0, 0, 45, -60, 0, 0, 0, 0, 0, 0, 0, 0, 44, 0, 0, 0, 0, 0, + // State 52 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 226, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 53 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 230, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 54 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 241, 240, 0, 0, 0, 0, 0, 0, 0, 0, 61, 0, 0, 0, 0, 187, 186, 103, 243, 242, 244, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 55 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 160, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -133, 0, 0, 0, 0, 0, 0, 0, -133, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 56 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 246, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 57 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 247, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 58 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 251, 63, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 252, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 59 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -63, -63, 0, 0, 0, 0, 66, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 0, 0, 0, 258, 259, 256, 257, -63, + // State 60 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 241, 240, 0, 0, 0, 0, 0, 0, 0, 0, 61, 0, 0, 0, 0, 187, 186, 103, 243, 242, 244, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 61 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 251, 63, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 262, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 62 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 119, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 63 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 241, 240, 0, 0, 0, 0, 0, 0, 0, 0, 61, 0, 0, 0, 0, 187, 186, 103, 243, 242, 244, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 64 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 241, 240, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 187, 186, 103, 243, 242, 244, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 65 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 241, 240, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 187, 186, 103, 243, 242, 244, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 66 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 241, 240, 0, 0, 0, 0, 0, 0, 0, 0, 61, 0, 0, 0, 0, 187, 186, 103, 243, 242, 244, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 67 + -69, -69, -69, -69, -69, -69, -69, -69, -69, -69, -69, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 68 + -65, -65, -65, -65, -65, -65, -65, -65, -65, -65, -65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 69 + -77, -77, -77, -77, -77, -77, -77, -77, -77, -77, -77, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 70 + -74, -74, -74, -74, -74, -74, -74, -74, -74, -74, -74, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 71 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 72 + -70, -70, -70, -70, -70, -70, -70, -70, -70, -70, -70, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 73 + -67, -67, -67, -67, -67, -67, -67, -67, -67, -67, -67, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 74 + -72, -72, -72, -72, -72, -72, -72, -72, -72, -72, -72, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 75 + -71, -71, -71, -71, -71, -71, -71, -71, -71, -71, -71, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 76 + -68, -68, -68, -68, -68, -68, -68, -68, -68, -68, -68, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 77 + -73, 
-73, -73, -73, -73, -73, -73, -73, -73, -73, -73, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 78 + -66, -66, -66, -66, -66, -66, -66, -66, -66, -66, -66, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 79 + -64, -64, -64, -64, -64, -64, -64, -64, -64, -64, -64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 80 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 92, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 81 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 82 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 93, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 83 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 94, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 84 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 95, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 85 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 96, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 86 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 97, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 87 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 98, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 88 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 99, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 89 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 90 + -78, -78, -78, -78, -78, -78, -78, -78, -78, -78, -78, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 91 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 92 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 93 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 94 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 95 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 96 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 97 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 98 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 99 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 106, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 100 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 107, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 101 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 108, -141, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 102 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -142, -142, -142, -142, -142, 0, 0, 0, 0, -142, -142, 0, 0, 0, 0, -142, 0, 0, -142, 0, 0, 0, 0, 0, 0, -142, -142, 0, -142, 0, -142, 0, -142, -142, -142, -142, 0, 0, -142, -142, -142, -142, -142, -142, -142, + // State 103 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 104 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 105 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 106 + -185, -185, -185, -185, -185, -185, -185, -185, -185, -185, -185, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 107 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 139, 0, 0, 0, 0, 0, 0, 29, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 138, 0, 0, 0, 0, 0, 0, 0, + // State 108 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -39, 0, 0, 0, 0, 0, 0, 0, -39, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -39, -39, -39, 0, 0, 0, 0, + // State 109 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 140, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 110 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -37, 0, 0, 0, 0, 0, 0, 0, -37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -37, -37, -37, 0, 0, 0, 0, + // State 111 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -38, 0, 0, 0, 0, 0, 0, 0, -38, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -38, -38, -38, 0, 0, 0, 0, + // State 112 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -40, 0, 0, 0, 0, 0, 0, 0, -40, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -40, -40, -40, 0, 0, 0, 0, + // State 113 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 114 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 115 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -27, 0, 0, 0, 0, 0, 0, 0, -27, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -27, -27, -27, 0, 0, 0, 0, + // State 116 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -85, 0, 0, 0, 0, 0, 0, 0, -85, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 117 + -46, -46, -46, -46, -46, -46, -46, -46, -46, -46, -46, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 118 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 119 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 147, 0, 0, 0, 0, 0, 0, 0, -56, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 120 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 148, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 121 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -53, 0, 0, 0, 0, 0, 0, 0, 149, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 122 + -104, -104, -104, -104, -104, -104, -104, -104, -104, -104, -104, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 123 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 124 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 153, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 125 + -106, -106, -106, -106, -106, -106, -106, -106, -106, -106, -106, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 126 + -112, -112, -112, -112, -112, -112, -112, -112, -112, -112, -112, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 127 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -139, 0, 0, 0, 0, 0, 0, 0, -139, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 128 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -141, -141, -141, -141, -141, 0, 0, 0, 0, -141, -141, 0, 0, 0, 0, -141, 0, 0, -141, 0, 0, 0, 0, 0, 0, -141, -141, 0, -141, 0, -141, 0, 162, 0, -141, -141, 0, 0, -141, -141, -141, -141, -141, -141, -141, + // State 129 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -156, 0, 0, 0, -156, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 130 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 37, + // State 131 + -151, -151, -151, -151, -151, -151, -151, -151, -151, -151, -151, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 132 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -179, -179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -179, 0, 0, 0, 0, 0, 0, 0, -179, 0, -179, 0, -179, -179, 0, 0, -179, 0, 0, 0, 0, 0, 0, 0, 0, 0, -179, + // State 133 + -164, -164, -164, -164, -164, -164, -164, -164, -164, -164, -164, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 134 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -98, 0, 0, 0, 0, -98, 0, 0, 0, 0, 0, 0, 0, -98, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 135 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 170, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 136 + -171, -171, -171, -171, -171, -171, -171, -171, -171, -171, -171, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 137 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 171, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 138 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -143, -143, -143, -143, -143, 0, 0, 0, 0, -143, -143, 0, 0, 0, 0, -143, 0, 0, -143, 0, 0, 0, 0, 0, 0, -143, -143, 0, -143, 0, -143, 0, -143, -143, -143, -143, 0, 0, -143, -143, -143, -143, -143, -143, -143, + // State 139 + -36, -36, -36, -36, -36, -36, -36, -36, -36, -36, -36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 140 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -166, 0, 0, 0, 0, 0, 0, 0, -166, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -166, -166, -166, 0, 0, 0, 0, + // State 141 + -45, -45, -45, -45, -45, -45, -45, -45, -45, -45, -45, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 142 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -176, 0, 0, 0, 0, 0, 0, 0, 0, 177, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 143 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 178, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 144 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -86, 0, 0, 0, 0, 0, 0, 0, -86, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 145 + -48, -48, -48, -48, -48, -48, -48, -48, -48, -48, -48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 146 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -55, 0, 0, 0, 0, 0, 0, 0, 193, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 147 + -80, -80, -80, -80, -80, -80, -80, -80, -80, -80, -80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 148 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -19, 0, 0, 0, 0, 0, 0, 0, -19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 149 + -105, -105, -105, -105, -105, -105, -105, -105, -105, -105, -105, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 150 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -35, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -35, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 151 + -107, -107, -107, -107, -107, -107, -107, -107, -107, -107, -107, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 152 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 47, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 153 + -113, -113, -113, -113, -113, -113, -113, -113, -113, -113, -113, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 154 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -140, 0, 0, 0, 0, 0, 0, 0, -140, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 155 + -149, -149, -149, -149, -149, -149, -149, -149, -149, -149, -149, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 156 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -142, -142, -142, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -142, 0, 0, 0, 0, 0, 0, 0, -142, 0, 0, 0, 0, 34, -142, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 157 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -136, 0, 0, 0, 0, 0, 0, 0, -136, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 158 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 159 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 160 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 50, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 161 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 139, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 162 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -157, 0, 0, 0, -157, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 163 + -152, -152, -152, -152, -152, -152, -152, -152, -152, -152, -152, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 164 + -165, -165, -165, -165, -165, -165, -165, -165, -165, -165, -165, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 165 + -167, -167, -167, -167, -167, -167, -167, -167, -167, -167, -167, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 166 + -173, -173, -173, -173, -173, -173, -173, -173, -173, -173, -173, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 167 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -99, 0, 0, 0, 0, -99, 0, 0, 0, 0, 0, 0, 0, -99, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 168 + -172, -172, -172, -172, -172, -172, -172, -172, -172, -172, -172, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 169 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -95, 0, 0, 0, 0, -95, 0, 0, 0, 0, 0, 0, 0, -95, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 170 + -187, -187, -187, -187, -187, -187, -187, -187, -187, -187, -187, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 171 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 200, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 172 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -41, 0, 0, 0, 0, 0, 0, 0, -41, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -41, -41, -41, 0, 0, 0, 0, + // State 173 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 205, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 174 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -49, 0, 0, 0, 0, 0, 206, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 175 + -47, -47, -47, -47, -47, -47, -47, -47, -47, -47, -47, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 176 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 207, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 177 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -6, 0, 0, 0, 0, 0, 0, 0, 0, -6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 178 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -191, -191, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -191, 0, 0, 0, 0, 0, 0, 0, -191, 0, -191, 0, -191, 0, 0, 0, -191, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 179 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -195, -195, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -195, 0, 0, 0, 0, 0, 0, 0, -195, 0, -195, 0, -195, 0, 0, 0, -195, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 180 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -201, -201, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -201, 0, 0, 0, 0, 0, 0, 0, -201, 0, -201, 0, -201, 0, 0, 0, -201, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 181 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -196, -196, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -196, 0, 0, 0, 0, 0, 0, 0, -196, 0, -196, 0, -196, 0, 0, 0, -196, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 182 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -197, -197, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -197, 0, 0, 0, 0, 0, 0, 0, -197, 0, -197, 0, -197, 0, 0, 0, -197, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 183 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -194, -194, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -194, 0, 0, 0, 0, 0, 0, 0, -194, 0, -194, 0, -194, 0, 0, 0, -194, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 184 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -82, -82, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -82, 0, 0, 0, 0, 0, 0, 0, -82, 0, -82, 0, 0, 0, 0, 0, -82, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
// State 185 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -44, -44, 0, 0, 0, 0, -44, -44, 0, 0, 0, 0, -44, 0, 0, -44, 0, 0, 0, 0, 0, 0, 0, -44, 0, -44, 0, -44, 0, 0, 0, -44, -44, 0, 0, 0, 0, -44, -44, -44, -44, -44, + // State 186 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -43, -43, 0, 0, 0, 0, -43, -43, 0, 0, 0, 0, -43, 0, 0, -43, 0, 0, 0, 0, 0, 0, 0, -43, 0, -43, 0, -43, 0, 0, 0, -43, -43, 0, 0, 0, 0, -43, -43, -43, -43, -43, + // State 187 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -79, -79, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -79, 0, 0, 0, 0, 0, 0, 0, -79, 0, -79, 0, -79, 0, 0, 0, -79, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 188 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -189, -189, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -189, 0, 0, 0, 0, 0, 0, 0, -189, 0, -189, 0, -189, 0, 0, 0, -189, 0, 212, 0, 0, 0, 0, 0, 0, 0, 0, + // State 189 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -188, -188, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -188, 0, 0, 0, 0, 0, 0, 0, -188, 0, -188, 0, -188, 0, 0, 0, -188, 0, 213, 0, 0, 0, 0, 0, 0, 0, 0, + // State 190 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -148, -148, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -148, 0, 0, 0, 0, 0, 0, 0, -148, 0, -148, 0, -148, 0, 0, 0, -148, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 191 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -190, -190, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -190, 0, 0, 0, 0, 0, 0, 0, -190, 0, -190, 0, -190, 0, 0, 0, -190, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 192 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -20, 0, 0, 0, 0, 0, 0, 0, -20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 193 + -150, -150, -150, -150, -150, -150, -150, -150, -150, -150, -150, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 194 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -135, 0, 0, 0, 0, 0, 0, 0, -135, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 195 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 219, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 196 + -169, -169, -169, -169, -169, -169, -169, -169, -169, -169, -169, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 197 + -168, -168, -168, -168, -168, -168, -168, -168, -168, -168, -168, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 198 + -174, -174, -174, -174, -174, -174, -174, -174, -174, -174, -174, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 199 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 221, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 200 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -42, 0, 0, 0, 0, 0, 0, 0, -42, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -42, -42, -42, 0, 0, 0, 0, + // State 201 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -163, 0, 0, 0, 0, 0, 0, 0, -163, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -163, -163, -163, 0, 0, 0, 0, + // State 202 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -158, 0, 0, 0, 0, 0, 0, 0, -158, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -158, -158, -158, 0, 0, 0, 0, + // State 203 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -51, 0, 0, 0, 0, 0, 222, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 204 + 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -26, 0, 0, 0, 0, 0, 0, 0, -26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -26, -26, -26, 0, 0, 0, 0, + // State 205 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -14, 0, 0, 0, 0, 0, 0, 0, 0, 0, -14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 206 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -7, 0, 0, 0, 0, 0, 0, 0, 0, -7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 207 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 59, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 208 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 209 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -57, 0, 0, 0, 225, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 210 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -199, -199, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -199, 0, 0, 0, 0, 0, 0, 0, -199, 0, -199, 0, -199, 0, 0, 0, -199, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 211 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 227, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 212 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 228, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 213 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -183, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -183, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 214 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
-30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 215 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -132, 0, 0, 0, 0, 0, 0, 0, -132, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 216 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -118, 0, 0, 0, 0, 0, 0, 0, -118, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 217 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -159, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -159, 0, 0, 0, 0, 0, 0, 0, -159, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 218 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 219 + -170, -170, -170, -170, -170, -170, -170, -170, -170, -170, -170, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 220 + -186, -186, -186, -186, -186, -186, -186, -186, -186, -186, -186, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 221 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -15, 0, 0, 0, 0, 0, 0, 0, 0, 0, -15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 222 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -59, 0, 0, 0, 253, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 223 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -198, -198, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -198, 0, 0, 0, 0, 0, 0, 0, -198, 0, -198, 0, -198, 0, 0, 0, -198, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 224 + 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -24, -24, -24, -24, -24, -24, -24, -24, -24, -24, 0, 0, 0, -24, -24, 0, 0, 0, 0, 0, 0, 0, 0, -24, 0, 0, 0, 0, 0, + // State 225 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -200, -200, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -200, 0, 0, 0, 0, 0, 0, 0, -200, 0, -200, 0, -200, 0, 0, 0, -200, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 226 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -193, -193, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -193, 0, 0, 0, 0, 0, 0, 0, -193, 0, -193, 0, -193, 0, 0, 0, -193, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 227 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -192, -192, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -192, 0, 0, 0, 0, 0, 0, 0, -192, 0, -192, 0, -192, 0, 0, 0, -192, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 228 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -184, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -184, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 229 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 230 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, -117, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -117, + // State 231 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -111, -111, 0, 0, 0, 0, -111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -111, 0, 0, 0, 0, -111, -111, -111, -111, -111, + // State 232 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -115, -115, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -115, + // State 233 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 254, + // State 234 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -146, -146, 0, 0, 0, 0, -146, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -146, 0, 0, 0, 0, -146, -146, -146, -146, -146, + // State 235 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -29, -29, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -29, + // State 236 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 67, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -81, + // State 237 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -147, -147, 0, 0, 0, 0, -147, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -147, 0, 0, 0, 0, -147, -147, -147, -147, -147, + // State 238 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -90, -90, 0, 0, 0, 0, -90, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -90, 0, 0, 0, 0, -90, -90, -90, -90, -90, + // State 239 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -145, -145, 0, 0, 0, 0, -145, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -145, 0, 0, 0, 0, -145, -145, -145, -145, -145, + // State 240 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -144, -144, 0, 0, 0, 0, -144, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -144, 0, 0, 0, 0, -144, -144, -144, -144, -144, + // State 241 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -109, -109, 0, 0, 0, 0, -109, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -109, 0, 0, 0, 0, -109, -109, -109, -109, -109, + // State 242 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -108, -108, 0, 0, 0, 0, -108, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -108, 0, 0, 0, 0, -108, -108, -108, -108, -108, + // State 243 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -110, -110, 0, 0, 0, 0, -110, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -110, 0, 0, 0, 0, -110, -110, -110, -110, -110, + // State 244 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -131, 0, 0, 0, 0, 0, 0, 0, -131, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 245 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -119, 0, 0, 0, 0, 0, 0, 0, -119, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 246 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -160, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -160, 0, 0, 0, 0, 0, 0, 0, -160, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 247 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -153, 0, 0, 0, -153, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 248 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -126, -126, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -126, 0, 0, 0, 0, 0, 0, 0, -126, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 249 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -129, -129, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -129, 0, 0, 0, 0, 0, 0, 0, -129, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 250 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 264, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 251 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -122, -122, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -122, 0, 0, 0, 0, 0, 0, 0, -122, 0, -122, 0, -122, 0, 0, 0, -122, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 252 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -25, -25, -25, -25, -25, -25, -25, -25, -25, -25, 0, 0, 0, -25, -25, 0, 0, 0, 0, 0, 0, 0, 0, -25, 0, 0, 0, 0, 0, + // State 253 
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 266, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 254 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 268, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 255 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -102, -102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -102, -102, -102, -102, -102, -102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 256 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -103, -103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -103, -103, -103, -103, -103, -103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 257 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -100, -100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -100, -100, -100, -100, -100, -100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 258 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -101, -101, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -101, -101, -101, -101, -101, -101, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 259 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -114, -114, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -114, + // State 260 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -130, -130, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -130, 0, 0, 0, 0, 0, 0, 0, -130, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 261 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -123, -123, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -123, 0, 0, 0, 0, 0, 0, 0, -123, 0, -123, 0, -123, 0, 0, 0, -123, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 262 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -125, -125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -125, 0, 0, 0, 0, 0, 0, 0, -125, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 263 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -124, -124, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -124, 0, 0, 0, 0, 0, 0, 0, -124, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 264 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -28, -28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -28, + // State 265 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -180, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -180, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // State 266 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -62, -62, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 0, 0, 0, 0, 0, 0, 0, -62, + // State 267 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -89, -89, 0, 0, 0, 0, -89, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -89, 0, 0, 0, 0, -89, -89, -89, -89, -89, + // State 268 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -61, -61, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 0, 0, 0, 0, 0, 0, 0, -61, + // State 269 + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, -116, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -116, + ]; + fn __action(state: i16, integer: usize) -> i16 { + __ACTION[(state as usize) * 58 + integer] + } + const __EOF_ACTION: &[i16] = &[ + // State 0 + -91, + // State 1 + -92, + // State 2 + 0, + // State 3 + 0, + // State 4 + 0, + // State 5 + 0, + // State 6 + 0, + // State 7 + 0, + // State 8 + 0, + // State 9 + 0, + // State 10 + 0, + // State 11 + 0, + // State 12 + 0, + // State 13 + 0, + // State 14 + 0, + // State 15 + 0, + // State 16 + 0, + // State 17 + 0, + // State 18 + 0, + // State 19 + 0, + // State 20 + 0, + // State 21 + 
0, + // State 22 + 0, + // State 23 + 0, + // State 24 + 0, + // State 25 + 0, + // State 26 + 0, + // State 27 + 0, + // State 28 + 0, + // State 29 + 0, + // State 30 + 0, + // State 31 + 0, + // State 32 + 0, + // State 33 + 0, + // State 34 + 0, + // State 35 + 0, + // State 36 + 0, + // State 37 + 0, + // State 38 + 0, + // State 39 + 0, + // State 40 + 0, + // State 41 + 0, + // State 42 + 0, + // State 43 + 0, + // State 44 + 0, + // State 45 + 0, + // State 46 + 0, + // State 47 + 0, + // State 48 + 0, + // State 49 + 0, + // State 50 + 0, + // State 51 + 0, + // State 52 + 0, + // State 53 + 0, + // State 54 + 0, + // State 55 + 0, + // State 56 + 0, + // State 57 + 0, + // State 58 + 0, + // State 59 + 0, + // State 60 + 0, + // State 61 + 0, + // State 62 + 0, + // State 63 + 0, + // State 64 + 0, + // State 65 + 0, + // State 66 + 0, + // State 67 + -69, + // State 68 + -65, + // State 69 + -77, + // State 70 + -74, + // State 71 + -204, + // State 72 + -70, + // State 73 + -67, + // State 74 + -72, + // State 75 + -71, + // State 76 + -68, + // State 77 + -73, + // State 78 + -66, + // State 79 + -64, + // State 80 + 0, + // State 81 + 0, + // State 82 + 0, + // State 83 + 0, + // State 84 + 0, + // State 85 + 0, + // State 86 + 0, + // State 87 + 0, + // State 88 + 0, + // State 89 + 0, + // State 90 + -78, + // State 91 + 0, + // State 92 + 0, + // State 93 + 0, + // State 94 + 0, + // State 95 + 0, + // State 96 + 0, + // State 97 + 0, + // State 98 + 0, + // State 99 + 0, + // State 100 + 0, + // State 101 + 0, + // State 102 + 0, + // State 103 + 0, + // State 104 + 0, + // State 105 + 0, + // State 106 + -185, + // State 107 + 0, + // State 108 + 0, + // State 109 + 0, + // State 110 + 0, + // State 111 + 0, + // State 112 + 0, + // State 113 + 0, + // State 114 + 0, + // State 115 + 0, + // State 116 + 0, + // State 117 + -46, + // State 118 + 0, + // State 119 + 0, + // State 120 + 0, + // State 121 + 0, + // State 122 + -104, + // State 123 + 
0, + // State 124 + 0, + // State 125 + -106, + // State 126 + -112, + // State 127 + 0, + // State 128 + 0, + // State 129 + 0, + // State 130 + 0, + // State 131 + -151, + // State 132 + 0, + // State 133 + -164, + // State 134 + 0, + // State 135 + 0, + // State 136 + -171, + // State 137 + 0, + // State 138 + 0, + // State 139 + -36, + // State 140 + 0, + // State 141 + -45, + // State 142 + 0, + // State 143 + 0, + // State 144 + 0, + // State 145 + -48, + // State 146 + 0, + // State 147 + -80, + // State 148 + 0, + // State 149 + -105, + // State 150 + 0, + // State 151 + -107, + // State 152 + 0, + // State 153 + -113, + // State 154 + 0, + // State 155 + -149, + // State 156 + 0, + // State 157 + 0, + // State 158 + 0, + // State 159 + 0, + // State 160 + 0, + // State 161 + 0, + // State 162 + 0, + // State 163 + -152, + // State 164 + -165, + // State 165 + -167, + // State 166 + -173, + // State 167 + 0, + // State 168 + -172, + // State 169 + 0, + // State 170 + -187, + // State 171 + 0, + // State 172 + 0, + // State 173 + 0, + // State 174 + 0, + // State 175 + -47, + // State 176 + 0, + // State 177 + 0, + // State 178 + 0, + // State 179 + 0, + // State 180 + 0, + // State 181 + 0, + // State 182 + 0, + // State 183 + 0, + // State 184 + 0, + // State 185 + 0, + // State 186 + 0, + // State 187 + 0, + // State 188 + 0, + // State 189 + 0, + // State 190 + 0, + // State 191 + 0, + // State 192 + 0, + // State 193 + -150, + // State 194 + 0, + // State 195 + 0, + // State 196 + -169, + // State 197 + -168, + // State 198 + -174, + // State 199 + 0, + // State 200 + 0, + // State 201 + 0, + // State 202 + 0, + // State 203 + 0, + // State 204 + 0, + // State 205 + 0, + // State 206 + 0, + // State 207 + 0, + // State 208 + 0, + // State 209 + 0, + // State 210 + 0, + // State 211 + 0, + // State 212 + 0, + // State 213 + 0, + // State 214 + 0, + // State 215 + 0, + // State 216 + 0, + // State 217 + 0, + // State 218 + 0, + // State 219 + -170, + // 
State 220 + -186, + // State 221 + 0, + // State 222 + 0, + // State 223 + 0, + // State 224 + 0, + // State 225 + 0, + // State 226 + 0, + // State 227 + 0, + // State 228 + 0, + // State 229 + 0, + // State 230 + 0, + // State 231 + 0, + // State 232 + 0, + // State 233 + 0, + // State 234 + 0, + // State 235 + 0, + // State 236 + 0, + // State 237 + 0, + // State 238 + 0, + // State 239 + 0, + // State 240 + 0, + // State 241 + 0, + // State 242 + 0, + // State 243 + 0, + // State 244 + 0, + // State 245 + 0, + // State 246 + 0, + // State 247 + 0, + // State 248 + 0, + // State 249 + 0, + // State 250 + 0, + // State 251 + 0, + // State 252 + 0, + // State 253 + 0, + // State 254 + 0, + // State 255 + 0, + // State 256 + 0, + // State 257 + 0, + // State 258 + 0, + // State 259 + 0, + // State 260 + 0, + // State 261 + 0, + // State 262 + 0, + // State 263 + 0, + // State 264 + 0, + // State 265 + 0, + // State 266 + 0, + // State 267 + 0, + // State 268 + 0, + // State 269 + 0, + ]; + fn __goto(state: i16, nt: usize) -> i16 { + match nt { + 3 => 142, + 8 => 42, + 11 => 119, + 14 => 51, + 15 => 108, + 16 => match state { + 66 => 269, + _ => 230, + }, + 17 => match state { + 19 => 150, + _ => 123, + }, + 19 => 19, + 20 => 67, + 21 => match state { + 29..=30 => 172, + 40..=41 => 200, + _ => 109, + }, + 22 => match state { + 30 => 41, + _ => 40, + }, + 23 => match state { + 54 | 60 | 63..=66 => 231, + _ => 178, + }, + 24 => 68, + 25 => 173, + 26 => match state { + 28 => 171, + _ => 120, + }, + 27 => 208, + 28 => 232, + 29 => match state { + 1 => 90, + _ => 69, + }, + 31 => 1, + 32 => 179, + 33 => 70, + 34 => 233, + 35 => match state { + 17..=18 | 20 | 24 | 26 | 32 | 34 | 37 | 39 | 50 | 52 | 56..=57 => 144, + 31 => 174, + 42 => 203, + 58 | 61 => 248, + 62 => 262, + _ => 116, + }, + 37 => match state { + 7 => 18, + 9 => 20, + 12 => 24, + 13 => 26, + 15 => 32, + 21 => 34, + 25 => 37, + 27 => 39, + 38 => 50, + 45 => 52, + 48 => 56, + 49 => 57, + _ => 17, + }, + 39 => 
match state { + 64 => 266, + 65 => 268, + _ => 59, + }, + 40 => 71, + 42 => match state { + 27 | 38 => 167, + _ => 134, + }, + 44 => match state { + 25 => 38, + _ => 27, + }, + 45 => 64, + 46 => 72, + 47 => 73, + 48 => 234, + 49 => 74, + 50 => match state { + 60 => 259, + 63 => 264, + _ => 235, + }, + 51 => 236, + 52 => match state { + 35 => 194, + 47 => 215, + 55 => 244, + _ => 157, + }, + 54 => 180, + 55 => match state { + 61 => 260, + _ => 249, + }, + 57 => 61, + 58 => match state { + 21 => 154, + _ => 127, + }, + 59 => 21, + 60 => match state { + 10 | 21 => 22, + 2 => 100, + 14 => 140, + 43 => 207, + 54 | 60 | 63..=66 => 237, + _ => 181, + }, + 61 => match state { + 2 => 101, + _ => 128, + }, + 62 => 238, + 63 => 182, + 64 => 75, + 65 => 76, + 66 => match state { + 23 => 162, + _ => 129, + }, + 68 => 23, + 69 => 110, + 70 => match state { + 47 => 55, + _ => 35, + }, + 72 => 111, + 73 => 77, + 74 => 112, + 75 => 78, + 76 => 103, + 78 => match state { + 11 | 23 => 130, + 36 => 195, + _ => 183, + }, + 79 => match state { + 53 => 228, + _ => 213, + }, + 81 => 53, + 82 => 79, + 83 => match state { + 44 => 209, + 51 => 222, + _ => 184, + }, + _ => 0, + } + } + const __TERMINAL: &[&str] = &[ + r###""use""###, + r###""character""###, + r###""template""###, + r###""life_arc""###, + r###""schedule""###, + r###""behavior""###, + r###""institution""###, + r###""relationship""###, + r###""location""###, + r###""species""###, + r###""enum""###, + r###""state""###, + r###""on""###, + r###""as""###, + r###""self""###, + r###""other""###, + r###""remove""###, + r###""append""###, + r###""forall""###, + r###""exists""###, + r###""in""###, + r###""where""###, + r###""and""###, + r###""or""###, + r###""not""###, + r###""strict""###, + r###""include""###, + r###""from""###, + r###""is""###, + r###""true""###, + r###""false""###, + r###"Ident"###, + r###"IntLit"###, + r###"FloatLit"###, + r###"StringLit"###, + r###"TimeLit"###, + r###"DurationLit"###, + r###"ProseBlockToken"###, + 
r###""{""###, + r###""}""###, + r###""(""###, + r###"")""###, + r###""[""###, + r###""]""###, + r###"":""###, + r###""::""###, + r###"";""###, + r###"",""###, + r###"".""###, + r###""..""###, + r###""*""###, + r###""?""###, + r###""@""###, + r###"">""###, + r###"">=""###, + r###""<""###, + r###""<=""###, + r###""->""###, + ]; + fn __expected_tokens(__state: i16) -> alloc::vec::Vec { + __TERMINAL.iter().enumerate().filter_map(|(index, terminal)| { + let next_state = __action(__state, index); + if next_state == 0 { + None + } else { + Some(alloc::string::ToString::to_string(terminal)) + } + }).collect() + } + fn __expected_tokens_from_states< + >( + __states: &[i16], + _: core::marker::PhantomData<()>, + ) -> alloc::vec::Vec + { + __TERMINAL.iter().enumerate().filter_map(|(index, terminal)| { + if __accepts(None, __states, Some(index), core::marker::PhantomData::<()>) { + Some(alloc::string::ToString::to_string(terminal)) + } else { + None + } + }).collect() + } + struct __StateMachine<> + where + { + __phantom: core::marker::PhantomData<()>, + } + impl<> __state_machine::ParserDefinition for __StateMachine<> + where + { + type Location = usize; + type Error = crate::syntax::ParseError; + type Token = Token; + type TokenIndex = usize; + type Symbol = __Symbol<>; + type Success = File; + type StateIndex = i16; + type Action = i16; + type ReduceIndex = i16; + type NonterminalIndex = usize; + + #[inline] + fn start_location(&self) -> Self::Location { + Default::default() + } + + #[inline] + fn start_state(&self) -> Self::StateIndex { + 0 + } + + #[inline] + fn token_to_index(&self, token: &Self::Token) -> Option { + __token_to_integer(token, core::marker::PhantomData::<()>) + } + + #[inline] + fn action(&self, state: i16, integer: usize) -> i16 { + __action(state, integer) + } + + #[inline] + fn error_action(&self, state: i16) -> i16 { + __action(state, 58 - 1) + } + + #[inline] + fn eof_action(&self, state: i16) -> i16 { + __EOF_ACTION[state as usize] + } + + #[inline] 
+ fn goto(&self, state: i16, nt: usize) -> i16 { + __goto(state, nt) + } + + fn token_to_symbol(&self, token_index: usize, token: Self::Token) -> Self::Symbol { + __token_to_symbol(token_index, token, core::marker::PhantomData::<()>) + } + + fn expected_tokens(&self, state: i16) -> alloc::vec::Vec { + __expected_tokens(state) + } + + fn expected_tokens_from_states(&self, states: &[i16]) -> alloc::vec::Vec { + __expected_tokens_from_states(states, core::marker::PhantomData::<()>) + } + + #[inline] + fn uses_error_recovery(&self) -> bool { + false + } + + #[inline] + fn error_recovery_symbol( + &self, + recovery: __state_machine::ErrorRecovery, + ) -> Self::Symbol { + panic!("error recovery not enabled for this grammar") + } + + fn reduce( + &mut self, + action: i16, + start_location: Option<&Self::Location>, + states: &mut alloc::vec::Vec, + symbols: &mut alloc::vec::Vec<__state_machine::SymbolTriple>, + ) -> Option<__state_machine::ParseResult> { + __reduce( + action, + start_location, + states, + symbols, + core::marker::PhantomData::<()>, + ) + } + + fn simulate_reduce(&self, action: i16) -> __state_machine::SimulatedReduce { + __simulate_reduce(action, core::marker::PhantomData::<()>) + } + } + fn __token_to_integer< + >( + __token: &Token, + _: core::marker::PhantomData<()>, + ) -> Option + { + match __token { + Token::Use if true => Some(0), + Token::Character if true => Some(1), + Token::Template if true => Some(2), + Token::LifeArc if true => Some(3), + Token::Schedule if true => Some(4), + Token::Behavior if true => Some(5), + Token::Institution if true => Some(6), + Token::Relationship if true => Some(7), + Token::Location if true => Some(8), + Token::Species if true => Some(9), + Token::Enum if true => Some(10), + Token::State if true => Some(11), + Token::On if true => Some(12), + Token::As if true => Some(13), + Token::SelfKw if true => Some(14), + Token::Other if true => Some(15), + Token::Remove if true => Some(16), + Token::Append if true => 
Some(17), + Token::ForAll if true => Some(18), + Token::Exists if true => Some(19), + Token::In if true => Some(20), + Token::Where if true => Some(21), + Token::And if true => Some(22), + Token::Or if true => Some(23), + Token::Not if true => Some(24), + Token::Strict if true => Some(25), + Token::Include if true => Some(26), + Token::From if true => Some(27), + Token::Is if true => Some(28), + Token::True if true => Some(29), + Token::False if true => Some(30), + Token::Ident(_) if true => Some(31), + Token::IntLit(_) if true => Some(32), + Token::FloatLit(_) if true => Some(33), + Token::StringLit(_) if true => Some(34), + Token::TimeLit(_) if true => Some(35), + Token::DurationLit(_) if true => Some(36), + Token::ProseBlock(_) if true => Some(37), + Token::LBrace if true => Some(38), + Token::RBrace if true => Some(39), + Token::LParen if true => Some(40), + Token::RParen if true => Some(41), + Token::LBracket if true => Some(42), + Token::RBracket if true => Some(43), + Token::Colon if true => Some(44), + Token::ColonColon if true => Some(45), + Token::Semicolon if true => Some(46), + Token::Comma if true => Some(47), + Token::Dot if true => Some(48), + Token::DotDot if true => Some(49), + Token::Star if true => Some(50), + Token::Question if true => Some(51), + Token::At if true => Some(52), + Token::Gt if true => Some(53), + Token::Ge if true => Some(54), + Token::Lt if true => Some(55), + Token::Le if true => Some(56), + Token::Arrow if true => Some(57), + _ => None, + } + } + fn __token_to_symbol< + >( + __token_index: usize, + __token: Token, + _: core::marker::PhantomData<()>, + ) -> __Symbol<> + { + #[allow(clippy::manual_range_patterns)]match __token_index { + 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 => __Symbol::Variant0(__token), + 31 | 34 | 35 | 36 => 
match __token { + Token::Ident(__tok0) | Token::StringLit(__tok0) | Token::TimeLit(__tok0) | Token::DurationLit(__tok0) if true => __Symbol::Variant1(__tok0), + _ => unreachable!(), + }, + 32 => match __token { + Token::IntLit(__tok0) if true => __Symbol::Variant2(__tok0), + _ => unreachable!(), + }, + 33 => match __token { + Token::FloatLit(__tok0) if true => __Symbol::Variant3(__tok0), + _ => unreachable!(), + }, + 37 => match __token { + Token::ProseBlock(__tok0) if true => __Symbol::Variant4(__tok0), + _ => unreachable!(), + }, + _ => unreachable!(), + } + } + fn __simulate_reduce< + >( + __reduce_index: i16, + _: core::marker::PhantomData<()>, + ) -> __state_machine::SimulatedReduce<__StateMachine<>> + { + match __reduce_index { + 0 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 0, + } + } + 1 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 0, + } + } + 2 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 1, + } + } + 3 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 2, + } + } + 4 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 2, + } + } + 5 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 3, + } + } + 6 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 3, + } + } + 7 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 4, + } + } + 8 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 5, + } + } + 9 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 5, + } + } + 10 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 6, + } + } + 11 => { + __state_machine::SimulatedReduce::Reduce { 
+ states_to_pop: 0, + nonterminal_produced: 7, + } + } + 12 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 7, + } + } + 13 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 8, + } + } + 14 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 8, + } + } + 15 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 9, + } + } + 16 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 10, + } + } + 17 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 10, + } + } + 18 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 11, + } + } + 19 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 11, + } + } + 20 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 12, + } + } + 21 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 13, + } + } + 22 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 13, + } + } + 23 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 14, + } + } + 24 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 14, + } + } + 25 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 4, + nonterminal_produced: 15, + } + } + 26 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 15, + } + } + 27 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 16, + } + } + 28 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 16, + } + } + 29 => { + __state_machine::SimulatedReduce::Reduce { + 
states_to_pop: 4, + nonterminal_produced: 17, + } + } + 30 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 5, + nonterminal_produced: 17, + } + } + 31 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 18, + } + } + 32 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 18, + } + } + 33 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 19, + } + } + 34 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 19, + } + } + 35 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 5, + nonterminal_produced: 20, + } + } + 36 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 21, + } + } + 37 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 21, + } + } + 38 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 21, + } + } + 39 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 21, + } + } + 40 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 22, + } + } + 41 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 22, + } + } + 42 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 23, + } + } + 43 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 23, + } + } + 44 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 5, + nonterminal_produced: 24, + } + } + 45 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 4, + nonterminal_produced: 24, + } + } + 46 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 6, + nonterminal_produced: 24, + } + } + 47 => { + __state_machine::SimulatedReduce::Reduce { + 
states_to_pop: 5, + nonterminal_produced: 24, + } + } + 48 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 25, + } + } + 49 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 25, + } + } + 50 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 25, + } + } + 51 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 25, + } + } + 52 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 26, + } + } + 53 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 26, + } + } + 54 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 26, + } + } + 55 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 26, + } + } + 56 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 27, + } + } + 57 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 27, + } + } + 58 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 27, + } + } + 59 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 27, + } + } + 60 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 28, + } + } + 61 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 28, + } + } + 62 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 28, + } + } + 63 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 29, + } + } + 64 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 29, + } + } + 65 => { + __state_machine::SimulatedReduce::Reduce { + 
states_to_pop: 1, + nonterminal_produced: 29, + } + } + 66 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 29, + } + } + 67 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 29, + } + } + 68 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 29, + } + } + 69 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 29, + } + } + 70 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 29, + } + } + 71 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 29, + } + } + 72 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 29, + } + } + 73 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 29, + } + } + 74 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 30, + } + } + 75 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 30, + } + } + 76 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 31, + } + } + 77 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 31, + } + } + 78 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 32, + } + } + 79 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 5, + nonterminal_produced: 33, + } + } + 80 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 34, + } + } + 81 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 35, + } + } + 82 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 36, + } + } + 83 => { + __state_machine::SimulatedReduce::Reduce { + 
states_to_pop: 1, + nonterminal_produced: 36, + } + } + 84 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 37, + } + } + 85 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 37, + } + } + 86 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 38, + } + } + 87 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 38, + } + } + 88 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 39, + } + } + 89 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 39, + } + } + 90 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 40, + } + } + 91 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 40, + } + } + 92 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 41, + } + } + 93 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 41, + } + } + 94 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 42, + } + } + 95 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 43, + } + } + 96 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 43, + } + } + 97 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 44, + } + } + 98 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 44, + } + } + 99 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 45, + } + } + 100 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 45, + } + } + 101 => { + __state_machine::SimulatedReduce::Reduce { + 
states_to_pop: 1, + nonterminal_produced: 45, + } + } + 102 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 45, + } + } + 103 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 4, + nonterminal_produced: 46, + } + } + 104 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 5, + nonterminal_produced: 46, + } + } + 105 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 4, + nonterminal_produced: 47, + } + } + 106 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 5, + nonterminal_produced: 47, + } + } + 107 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 48, + } + } + 108 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 48, + } + } + 109 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 48, + } + } + 110 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 48, + } + } + 111 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 4, + nonterminal_produced: 49, + } + } + 112 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 5, + nonterminal_produced: 49, + } + } + 113 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 50, + } + } + 114 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 50, + } + } + 115 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 51, + } + } + 116 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 51, + } + } + 117 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 52, + } + } + 118 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 4, + nonterminal_produced: 52, + } + } + 119 => { + 
__state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 53, + } + } + 120 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 53, + } + } + 121 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 4, + nonterminal_produced: 54, + } + } + 122 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 5, + nonterminal_produced: 54, + } + } + 123 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 55, + } + } + 124 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 55, + } + } + 125 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 55, + } + } + 126 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 56, + } + } + 127 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 56, + } + } + 128 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 57, + } + } + 129 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 57, + } + } + 130 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 5, + nonterminal_produced: 58, + } + } + 131 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 4, + nonterminal_produced: 58, + } + } + 132 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 4, + nonterminal_produced: 58, + } + } + 133 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 58, + } + } + 134 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 58, + } + } + 135 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 58, + } + } + 136 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 58, + } 
+ } + 137 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 58, + } + } + 138 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 59, + } + } + 139 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 59, + } + } + 140 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 60, + } + } + 141 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 61, + } + } + 142 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 61, + } + } + 143 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 62, + } + } + 144 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 62, + } + } + 145 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 62, + } + } + 146 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 62, + } + } + 147 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 63, + } + } + 148 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 5, + nonterminal_produced: 64, + } + } + 149 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 6, + nonterminal_produced: 64, + } + } + 150 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 4, + nonterminal_produced: 65, + } + } + 151 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 5, + nonterminal_produced: 65, + } + } + 152 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 5, + nonterminal_produced: 66, + } + } + 153 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 67, + } + } + 154 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + 
nonterminal_produced: 67, + } + } + 155 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 68, + } + } + 156 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 68, + } + } + 157 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 4, + nonterminal_produced: 69, + } + } + 158 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 70, + } + } + 159 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 4, + nonterminal_produced: 70, + } + } + 160 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 71, + } + } + 161 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 71, + } + } + 162 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 4, + nonterminal_produced: 72, + } + } + 163 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 4, + nonterminal_produced: 73, + } + } + 164 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 5, + nonterminal_produced: 73, + } + } + 165 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 74, + } + } + 166 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 5, + nonterminal_produced: 75, + } + } + 167 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 6, + nonterminal_produced: 75, + } + } + 168 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 6, + nonterminal_produced: 75, + } + } + 169 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 7, + nonterminal_produced: 75, + } + } + 170 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 4, + nonterminal_produced: 75, + } + } + 171 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 5, + nonterminal_produced: 75, + } + } + 172 => { + __state_machine::SimulatedReduce::Reduce { + 
states_to_pop: 5, + nonterminal_produced: 75, + } + } + 173 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 6, + nonterminal_produced: 75, + } + } + 174 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 76, + } + } + 175 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 76, + } + } + 176 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 77, + } + } + 177 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 77, + } + } + 178 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 78, + } + } + 179 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 4, + nonterminal_produced: 79, + } + } + 180 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 80, + } + } + 181 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 80, + } + } + 182 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 81, + } + } + 183 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 81, + } + } + 184 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 82, + } + } + 185 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 7, + nonterminal_produced: 82, + } + } + 186 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 5, + nonterminal_produced: 82, + } + } + 187 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 83, + } + } + 188 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 83, + } + } + 189 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 83, + } + } + 190 => { + 
__state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 83, + } + } + 191 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 83, + } + } + 192 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 83, + } + } + 193 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 83, + } + } + 194 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 83, + } + } + 195 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 83, + } + } + 196 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 83, + } + } + 197 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 83, + } + } + 198 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 2, + nonterminal_produced: 83, + } + } + 199 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 3, + nonterminal_produced: 83, + } + } + 200 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 83, + } + } + 201 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 1, + nonterminal_produced: 84, + } + } + 202 => { + __state_machine::SimulatedReduce::Reduce { + states_to_pop: 0, + nonterminal_produced: 84, + } + } + 203 => __state_machine::SimulatedReduce::Accept, + _ => panic!("invalid reduction index {}", __reduce_index) + } + } + pub struct FileParser { + _priv: (), + } + + impl Default for FileParser { fn default() -> Self { Self::new() } } + impl FileParser { + pub fn new() -> FileParser { + FileParser { + _priv: (), + } + } + + #[allow(dead_code)] + pub fn parse< + __TOKEN: __ToTriple<>, + __TOKENS: IntoIterator, + >( + &self, + __tokens0: __TOKENS, + ) -> Result> + { + let __tokens = __tokens0.into_iter(); + let mut __tokens = __tokens.map(|t| 
__ToTriple::to_triple(t)); + __state_machine::Parser::drive( + __StateMachine { + __phantom: core::marker::PhantomData::<()>, + }, + __tokens, + ) + } + } + fn __accepts< + >( + __error_state: Option, + __states: &[i16], + __opt_integer: Option, + _: core::marker::PhantomData<()>, + ) -> bool + { + let mut __states = __states.to_vec(); + __states.extend(__error_state); + loop { + let mut __states_len = __states.len(); + let __top = __states[__states_len - 1]; + let __action = match __opt_integer { + None => __EOF_ACTION[__top as usize], + Some(__integer) => __action(__top, __integer), + }; + if __action == 0 { return false; } + if __action > 0 { return true; } + let (__to_pop, __nt) = match __simulate_reduce(-(__action + 1), core::marker::PhantomData::<()>) { + __state_machine::SimulatedReduce::Reduce { + states_to_pop, nonterminal_produced + } => (states_to_pop, nonterminal_produced), + __state_machine::SimulatedReduce::Accept => return true, + }; + __states_len -= __to_pop; + __states.truncate(__states_len); + let __top = __states[__states_len - 1]; + let __next_state = __goto(__top, __nt); + __states.push(__next_state); + } + } + fn __reduce< + >( + __action: i16, + __lookahead_start: Option<&usize>, + __states: &mut alloc::vec::Vec, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> Option>> + { + let (__pop_states, __nonterminal) = match __action { + 0 => { + __reduce0(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 1 => { + __reduce1(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 2 => { + __reduce2(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 3 => { + __reduce3(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 4 => { + __reduce4(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 5 => { + __reduce5(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 6 => { + __reduce6(__lookahead_start, 
__symbols, core::marker::PhantomData::<()>) + } + 7 => { + __reduce7(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 8 => { + __reduce8(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 9 => { + __reduce9(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 10 => { + __reduce10(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 11 => { + __reduce11(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 12 => { + __reduce12(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 13 => { + __reduce13(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 14 => { + __reduce14(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 15 => { + __reduce15(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 16 => { + __reduce16(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 17 => { + __reduce17(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 18 => { + __reduce18(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 19 => { + __reduce19(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 20 => { + __reduce20(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 21 => { + __reduce21(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 22 => { + __reduce22(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 23 => { + __reduce23(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 24 => { + __reduce24(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 25 => { + __reduce25(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 26 => { + __reduce26(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 27 => { + __reduce27(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 28 => { + __reduce28(__lookahead_start, __symbols, 
core::marker::PhantomData::<()>) + } + 29 => { + __reduce29(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 30 => { + __reduce30(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 31 => { + __reduce31(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 32 => { + __reduce32(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 33 => { + __reduce33(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 34 => { + __reduce34(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 35 => { + __reduce35(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 36 => { + __reduce36(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 37 => { + __reduce37(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 38 => { + __reduce38(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 39 => { + __reduce39(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 40 => { + __reduce40(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 41 => { + __reduce41(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 42 => { + __reduce42(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 43 => { + __reduce43(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 44 => { + __reduce44(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 45 => { + __reduce45(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 46 => { + __reduce46(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 47 => { + __reduce47(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 48 => { + __reduce48(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 49 => { + __reduce49(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 50 => { + __reduce50(__lookahead_start, __symbols, 
core::marker::PhantomData::<()>) + } + 51 => { + __reduce51(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 52 => { + __reduce52(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 53 => { + __reduce53(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 54 => { + __reduce54(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 55 => { + __reduce55(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 56 => { + __reduce56(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 57 => { + __reduce57(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 58 => { + __reduce58(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 59 => { + __reduce59(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 60 => { + __reduce60(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 61 => { + __reduce61(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 62 => { + __reduce62(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 63 => { + __reduce63(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 64 => { + __reduce64(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 65 => { + __reduce65(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 66 => { + __reduce66(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 67 => { + __reduce67(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 68 => { + __reduce68(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 69 => { + __reduce69(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 70 => { + __reduce70(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 71 => { + __reduce71(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 72 => { + __reduce72(__lookahead_start, __symbols, 
core::marker::PhantomData::<()>) + } + 73 => { + __reduce73(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 74 => { + __reduce74(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 75 => { + __reduce75(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 76 => { + __reduce76(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 77 => { + __reduce77(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 78 => { + __reduce78(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 79 => { + __reduce79(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 80 => { + __reduce80(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 81 => { + __reduce81(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 82 => { + __reduce82(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 83 => { + __reduce83(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 84 => { + __reduce84(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 85 => { + __reduce85(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 86 => { + __reduce86(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 87 => { + __reduce87(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 88 => { + __reduce88(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 89 => { + __reduce89(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 90 => { + __reduce90(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 91 => { + __reduce91(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 92 => { + __reduce92(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 93 => { + __reduce93(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 94 => { + __reduce94(__lookahead_start, __symbols, 
core::marker::PhantomData::<()>) + } + 95 => { + __reduce95(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 96 => { + __reduce96(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 97 => { + __reduce97(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 98 => { + __reduce98(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 99 => { + __reduce99(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 100 => { + __reduce100(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 101 => { + __reduce101(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 102 => { + __reduce102(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 103 => { + __reduce103(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 104 => { + __reduce104(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 105 => { + __reduce105(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 106 => { + __reduce106(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 107 => { + __reduce107(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 108 => { + __reduce108(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 109 => { + __reduce109(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 110 => { + __reduce110(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 111 => { + __reduce111(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 112 => { + __reduce112(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 113 => { + __reduce113(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 114 => { + __reduce114(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 115 => { + __reduce115(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 116 => { + 
__reduce116(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 117 => { + __reduce117(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 118 => { + __reduce118(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 119 => { + __reduce119(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 120 => { + __reduce120(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 121 => { + __reduce121(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 122 => { + __reduce122(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 123 => { + __reduce123(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 124 => { + __reduce124(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 125 => { + __reduce125(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 126 => { + __reduce126(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 127 => { + __reduce127(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 128 => { + __reduce128(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 129 => { + __reduce129(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 130 => { + __reduce130(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 131 => { + __reduce131(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 132 => { + __reduce132(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 133 => { + __reduce133(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 134 => { + __reduce134(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 135 => { + __reduce135(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 136 => { + __reduce136(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 137 => { + __reduce137(__lookahead_start, __symbols, 
core::marker::PhantomData::<()>) + } + 138 => { + __reduce138(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 139 => { + __reduce139(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 140 => { + __reduce140(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 141 => { + __reduce141(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 142 => { + __reduce142(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 143 => { + __reduce143(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 144 => { + __reduce144(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 145 => { + __reduce145(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 146 => { + __reduce146(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 147 => { + __reduce147(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 148 => { + __reduce148(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 149 => { + __reduce149(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 150 => { + __reduce150(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 151 => { + __reduce151(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 152 => { + __reduce152(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 153 => { + __reduce153(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 154 => { + __reduce154(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 155 => { + __reduce155(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 156 => { + __reduce156(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 157 => { + __reduce157(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 158 => { + __reduce158(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 159 => { + 
__reduce159(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 160 => { + __reduce160(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 161 => { + __reduce161(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 162 => { + __reduce162(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 163 => { + __reduce163(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 164 => { + __reduce164(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 165 => { + __reduce165(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 166 => { + __reduce166(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 167 => { + __reduce167(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 168 => { + __reduce168(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 169 => { + __reduce169(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 170 => { + __reduce170(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 171 => { + __reduce171(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 172 => { + __reduce172(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 173 => { + __reduce173(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 174 => { + __reduce174(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 175 => { + __reduce175(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 176 => { + __reduce176(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 177 => { + __reduce177(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 178 => { + __reduce178(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 179 => { + __reduce179(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 180 => { + __reduce180(__lookahead_start, __symbols, 
core::marker::PhantomData::<()>) + } + 181 => { + __reduce181(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 182 => { + __reduce182(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 183 => { + __reduce183(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 184 => { + __reduce184(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 185 => { + __reduce185(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 186 => { + __reduce186(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 187 => { + __reduce187(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 188 => { + __reduce188(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 189 => { + __reduce189(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 190 => { + __reduce190(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 191 => { + __reduce191(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 192 => { + __reduce192(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 193 => { + __reduce193(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 194 => { + __reduce194(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 195 => { + __reduce195(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 196 => { + __reduce196(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 197 => { + __reduce197(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 198 => { + __reduce198(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 199 => { + __reduce199(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 200 => { + __reduce200(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 201 => { + __reduce201(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 202 => { + 
__reduce202(__lookahead_start, __symbols, core::marker::PhantomData::<()>) + } + 203 => { + // __File = File => ActionFn(0); + let __sym0 = __pop_Variant28(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action0::<>(__sym0); + return Some(Ok(__nt)); + } + _ => panic!("invalid action code {}", __action) + }; + let __states_len = __states.len(); + __states.truncate(__states_len - __pop_states); + let __state = *__states.last().unwrap(); + let __next_state = __goto(__state, __nonterminal); + __states.push(__next_state); + None + } + #[inline(never)] + fn __symbol_type_mismatch() -> ! { + panic!("symbol type mismatch") + } + fn __pop_Variant14< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, ArcState, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant14(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant16< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Behavior, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant16(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant12< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, BehaviorNode, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant12(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant19< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Character, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant19(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant29< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, CompOp, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant29(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant23< + >( + __symbols: &mut 
alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Declaration, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant23(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant25< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Duration, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant25(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant26< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, EnumDecl, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant26(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant13< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Expr, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant13(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant8< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Field, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant8(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant28< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, File, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant28(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant30< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Institution, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant30(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant31< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, LifeArc, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant31(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn 
__pop_Variant32< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Location, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant32(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant27< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Option, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant27(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant7< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Option, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant7(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant5< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Option, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant5(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant50< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Option, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant50(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant33< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Option>, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant33(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant45< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Option>, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant45(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant34< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Override, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant34(__v), __r)) => (__l, __v, __r), + _ => 
__symbol_type_mismatch() + } + } + fn __pop_Variant35< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, OverrideOp, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant35(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant37< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Participant, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant37(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant4< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, ProseBlock, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant4(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant39< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Relationship, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant39(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant40< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Schedule, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant40(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant41< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, ScheduleBlock, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant41(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant43< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Species, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant43(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant1< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, String, usize) + { + match __symbols.pop() { + Some((__l, 
__Symbol::Variant1(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant44< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Template, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant44(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant46< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Time, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant46(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant0< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Token, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant0(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant47< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Transition, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant47(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant49< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, UseDecl, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant49(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant10< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Value, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant10(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant20< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Vec, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant20(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant21< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Vec, usize) + { + match __symbols.pop() { + 
Some((__l, __Symbol::Variant21(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant22< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, Vec, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant22(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant15< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, alloc::vec::Vec, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant15(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant17< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, alloc::vec::Vec, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant17(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant24< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, alloc::vec::Vec, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant24(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant9< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, alloc::vec::Vec, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant9(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant36< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, alloc::vec::Vec, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant36(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant38< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, alloc::vec::Vec, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant38(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant42< + >( + __symbols: &mut 
alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, alloc::vec::Vec, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant42(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant6< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, alloc::vec::Vec, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant6(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant48< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, alloc::vec::Vec, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant48(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant11< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, alloc::vec::Vec, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant11(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant18< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, bool, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant18(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant3< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, f64, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant3(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant2< + >( + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)> + ) -> (usize, i64, usize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant2(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __reduce0< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // "strict"? 
= "strict" => ActionFn(116); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action116::<>(__sym0); + __symbols.push((__start, __Symbol::Variant5(__nt), __end)); + (1, 0) + } + fn __reduce1< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // "strict"? = => ActionFn(117); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action117::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant5(__nt), __end)); + (0, 0) + } + fn __reduce2< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ("," ) = ",", Ident => ActionFn(120); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action120::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant1(__nt), __end)); + (2, 1) + } + fn __reduce3< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ("," )* = => ActionFn(118); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action118::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant6(__nt), __end)); + (0, 2) + } + fn __reduce4< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ("," )* = ("," )+ => ActionFn(119); + let __sym0 = __pop_Variant6(__symbols); + let __start = __sym0.0; + 
let __end = __sym0.2; + let __nt = super::__action119::<>(__sym0); + __symbols.push((__start, __Symbol::Variant6(__nt), __end)); + (1, 2) + } + fn __reduce5< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ("," )+ = ",", Ident => ActionFn(167); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action167::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant6(__nt), __end)); + (2, 3) + } + fn __reduce6< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ("," )+ = ("," )+, ",", Ident => ActionFn(168); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant1(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant6(__symbols); + let __start = __sym0.0; + let __end = __sym2.2; + let __nt = super::__action168::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant6(__nt), __end)); + (3, 3) + } + fn __reduce7< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ("as" ) = "as", Ident => ActionFn(99); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action99::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant1(__nt), __end)); + (2, 4) + } + fn __reduce8< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ("as" )? 
= "as", Ident => ActionFn(171); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action171::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant7(__nt), __end)); + (2, 5) + } + fn __reduce9< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ("as" )? = => ActionFn(98); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action98::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant7(__nt), __end)); + (0, 5) + } + fn __reduce10< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ( ",") = Field, "," => ActionFn(158); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant8(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action158::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant8(__nt), __end)); + (2, 6) + } + fn __reduce11< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ( ",")* = => ActionFn(156); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action156::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant9(__nt), __end)); + (0, 7) + } + fn __reduce12< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ( ",")* = ( ",")+ 
=> ActionFn(157); + let __sym0 = __pop_Variant9(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action157::<>(__sym0); + __symbols.push((__start, __Symbol::Variant9(__nt), __end)); + (1, 7) + } + fn __reduce13< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ( ",")+ = Field, "," => ActionFn(174); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant8(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action174::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant9(__nt), __end)); + (2, 8) + } + fn __reduce14< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ( ",")+ = ( ",")+, Field, "," => ActionFn(175); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant8(__symbols); + let __sym0 = __pop_Variant9(__symbols); + let __start = __sym0.0; + let __end = __sym2.2; + let __nt = super::__action175::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant9(__nt), __end)); + (3, 8) + } + fn __reduce15< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ( ",") = Ident, "," => ActionFn(134); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant1(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action134::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant1(__nt), __end)); + (2, 9) + } + fn __reduce16< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: 
core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ( ",")* = => ActionFn(132); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action132::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant6(__nt), __end)); + (0, 10) + } + fn __reduce17< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ( ",")* = ( ",")+ => ActionFn(133); + let __sym0 = __pop_Variant6(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action133::<>(__sym0); + __symbols.push((__start, __Symbol::Variant6(__nt), __end)); + (1, 10) + } + fn __reduce18< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ( ",")+ = Ident, "," => ActionFn(178); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant1(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action178::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant6(__nt), __end)); + (2, 11) + } + fn __reduce19< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ( ",")+ = ( ",")+, Ident, "," => ActionFn(179); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant6(__symbols); + let __start = __sym0.0; + let __end = __sym2.2; + let __nt = super::__action179::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant6(__nt), __end)); + (3, 11) + } + fn __reduce20< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut 
alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ( ",") = Value, "," => ActionFn(145); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant10(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action145::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant10(__nt), __end)); + (2, 12) + } + fn __reduce21< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ( ",")* = => ActionFn(143); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action143::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant11(__nt), __end)); + (0, 13) + } + fn __reduce22< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ( ",")* = ( ",")+ => ActionFn(144); + let __sym0 = __pop_Variant11(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action144::<>(__sym0); + __symbols.push((__start, __Symbol::Variant11(__nt), __end)); + (1, 13) + } + fn __reduce23< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ( ",")+ = Value, "," => ActionFn(182); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant10(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action182::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant11(__nt), __end)); + (2, 14) + } + fn __reduce24< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut 
alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ( ",")+ = ( ",")+, Value, "," => ActionFn(183); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant10(__symbols); + let __sym0 = __pop_Variant11(__symbols); + let __start = __sym0.0; + let __end = __sym2.2; + let __nt = super::__action183::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant11(__nt), __end)); + (3, 14) + } + fn __reduce25< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ActionNode = Ident, "(", Comma, ")" => ActionFn(58); + assert!(__symbols.len() >= 4); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant20(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant1(__symbols); + let __start = __sym0.0; + let __end = __sym3.2; + let __nt = super::__action58::<>(__sym0, __sym1, __sym2, __sym3); + __symbols.push((__start, __Symbol::Variant12(__nt), __end)); + (4, 15) + } + fn __reduce26< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ActionNode = Ident => ActionFn(59); + let __sym0 = __pop_Variant1(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action59::<>(__sym0); + __symbols.push((__start, __Symbol::Variant12(__nt), __end)); + (1, 15) + } + fn __reduce27< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // AndExpr = AndExpr, "and", NotExpr => ActionFn(72); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant13(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant13(__symbols); + let __start = 
__sym0.0; + let __end = __sym2.2; + let __nt = super::__action72::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant13(__nt), __end)); + (3, 16) + } + fn __reduce28< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // AndExpr = NotExpr => ActionFn(73); + let __sym0 = __pop_Variant13(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action73::<>(__sym0); + __symbols.push((__start, __Symbol::Variant13(__nt), __end)); + (1, 16) + } + fn __reduce29< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ArcState = "state", Ident, "{", "}" => ActionFn(246); + assert!(__symbols.len() >= 4); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym3.2; + let __nt = super::__action246::<>(__sym0, __sym1, __sym2, __sym3); + __symbols.push((__start, __Symbol::Variant14(__nt), __end)); + (4, 17) + } + fn __reduce30< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ArcState = "state", Ident, "{", Transition+, "}" => ActionFn(247); + assert!(__symbols.len() >= 5); + let __sym4 = __pop_Variant0(__symbols); + let __sym3 = __pop_Variant48(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym4.2; + let __nt = super::__action247::<>(__sym0, __sym1, __sym2, __sym3, __sym4); + __symbols.push((__start, __Symbol::Variant14(__nt), __end)); + (5, 17) + } + fn __reduce31< + >( + __lookahead_start: 
Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ArcState* = => ActionFn(109); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action109::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant15(__nt), __end)); + (0, 18) + } + fn __reduce32< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ArcState* = ArcState+ => ActionFn(110); + let __sym0 = __pop_Variant15(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action110::<>(__sym0); + __symbols.push((__start, __Symbol::Variant15(__nt), __end)); + (1, 18) + } + fn __reduce33< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ArcState+ = ArcState => ActionFn(148); + let __sym0 = __pop_Variant14(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action148::<>(__sym0); + __symbols.push((__start, __Symbol::Variant15(__nt), __end)); + (1, 19) + } + fn __reduce34< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ArcState+ = ArcState+, ArcState => ActionFn(149); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant14(__symbols); + let __sym0 = __pop_Variant15(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action149::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant15(__nt), __end)); + (2, 19) + } + fn __reduce35< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: 
core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Behavior = "behavior", Ident, "{", BehaviorNode, "}" => ActionFn(51); + assert!(__symbols.len() >= 5); + let __sym4 = __pop_Variant0(__symbols); + let __sym3 = __pop_Variant12(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym4.2; + let __nt = super::__action51::<>(__sym0, __sym1, __sym2, __sym3, __sym4); + __symbols.push((__start, __Symbol::Variant16(__nt), __end)); + (5, 20) + } + fn __reduce36< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // BehaviorNode = SelectorNode => ActionFn(52); + let __sym0 = __pop_Variant12(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action52::<>(__sym0); + __symbols.push((__start, __Symbol::Variant12(__nt), __end)); + (1, 21) + } + fn __reduce37< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // BehaviorNode = SequenceNode => ActionFn(53); + let __sym0 = __pop_Variant12(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action53::<>(__sym0); + __symbols.push((__start, __Symbol::Variant12(__nt), __end)); + (1, 21) + } + fn __reduce38< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // BehaviorNode = ActionNode => ActionFn(54); + let __sym0 = __pop_Variant12(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action54::<>(__sym0); + __symbols.push((__start, __Symbol::Variant12(__nt), __end)); + (1, 21) + } + fn __reduce39< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut 
alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // BehaviorNode = SubTreeNode => ActionFn(55); + let __sym0 = __pop_Variant12(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action55::<>(__sym0); + __symbols.push((__start, __Symbol::Variant12(__nt), __end)); + (1, 21) + } + fn __reduce40< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // BehaviorNode+ = BehaviorNode => ActionFn(103); + let __sym0 = __pop_Variant12(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action103::<>(__sym0); + __symbols.push((__start, __Symbol::Variant17(__nt), __end)); + (1, 22) + } + fn __reduce41< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // BehaviorNode+ = BehaviorNode+, BehaviorNode => ActionFn(104); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant12(__symbols); + let __sym0 = __pop_Variant17(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action104::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant17(__nt), __end)); + (2, 22) + } + fn __reduce42< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // BoolLit = "true" => ActionFn(37); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action37::<>(__sym0); + __symbols.push((__start, __Symbol::Variant18(__nt), __end)); + (1, 23) + } + fn __reduce43< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // BoolLit = 
"false" => ActionFn(38); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action38::<>(__sym0); + __symbols.push((__start, __Symbol::Variant18(__nt), __end)); + (1, 23) + } + fn __reduce44< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Character = "character", Ident, TemplateClause, "{", "}" => ActionFn(242); + assert!(__symbols.len() >= 5); + let __sym4 = __pop_Variant0(__symbols); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant21(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym4.2; + let __nt = super::__action242::<>(__sym0, __sym1, __sym2, __sym3, __sym4); + __symbols.push((__start, __Symbol::Variant19(__nt), __end)); + (5, 24) + } + fn __reduce45< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Character = "character", Ident, "{", "}" => ActionFn(243); + assert!(__symbols.len() >= 4); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym3.2; + let __nt = super::__action243::<>(__sym0, __sym1, __sym2, __sym3); + __symbols.push((__start, __Symbol::Variant19(__nt), __end)); + (4, 24) + } + fn __reduce46< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Character = "character", Ident, TemplateClause, "{", Field+, "}" => ActionFn(244); + assert!(__symbols.len() >= 6); + let __sym5 = __pop_Variant0(__symbols); + let __sym4 = __pop_Variant9(__symbols); + let __sym3 = 
__pop_Variant0(__symbols); + let __sym2 = __pop_Variant21(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym5.2; + let __nt = super::__action244::<>(__sym0, __sym1, __sym2, __sym3, __sym4, __sym5); + __symbols.push((__start, __Symbol::Variant19(__nt), __end)); + (6, 24) + } + fn __reduce47< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Character = "character", Ident, "{", Field+, "}" => ActionFn(245); + assert!(__symbols.len() >= 5); + let __sym4 = __pop_Variant0(__symbols); + let __sym3 = __pop_Variant9(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym4.2; + let __nt = super::__action245::<>(__sym0, __sym1, __sym2, __sym3, __sym4); + __symbols.push((__start, __Symbol::Variant19(__nt), __end)); + (5, 24) + } + fn __reduce48< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Comma = Field => ActionFn(210); + let __sym0 = __pop_Variant8(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action210::<>(__sym0); + __symbols.push((__start, __Symbol::Variant20(__nt), __end)); + (1, 25) + } + fn __reduce49< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Comma = => ActionFn(211); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action211::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant20(__nt), __end)); + (0, 25) + } + fn __reduce50< + >( + 
__lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Comma = ( ",")+, Field => ActionFn(212); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant8(__symbols); + let __sym0 = __pop_Variant9(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action212::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant20(__nt), __end)); + (2, 25) + } + fn __reduce51< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Comma = ( ",")+ => ActionFn(213); + let __sym0 = __pop_Variant9(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action213::<>(__sym0); + __symbols.push((__start, __Symbol::Variant20(__nt), __end)); + (1, 25) + } + fn __reduce52< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Comma = Ident => ActionFn(214); + let __sym0 = __pop_Variant1(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action214::<>(__sym0); + __symbols.push((__start, __Symbol::Variant21(__nt), __end)); + (1, 26) + } + fn __reduce53< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Comma = => ActionFn(215); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action215::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant21(__nt), __end)); + (0, 26) + } + fn __reduce54< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, 
+ ) -> (usize, usize) + { + // Comma = ( ",")+, Ident => ActionFn(216); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant6(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action216::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant21(__nt), __end)); + (2, 26) + } + fn __reduce55< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Comma = ( ",")+ => ActionFn(217); + let __sym0 = __pop_Variant6(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action217::<>(__sym0); + __symbols.push((__start, __Symbol::Variant21(__nt), __end)); + (1, 26) + } + fn __reduce56< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Comma = Value => ActionFn(248); + let __sym0 = __pop_Variant10(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action248::<>(__sym0); + __symbols.push((__start, __Symbol::Variant22(__nt), __end)); + (1, 27) + } + fn __reduce57< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Comma = => ActionFn(249); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action249::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant22(__nt), __end)); + (0, 27) + } + fn __reduce58< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Comma = ( ",")+, Value => ActionFn(250); + assert!(__symbols.len() >= 2); + let __sym1 = 
__pop_Variant10(__symbols); + let __sym0 = __pop_Variant11(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action250::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant22(__nt), __end)); + (2, 27) + } + fn __reduce59< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Comma = ( ",")+ => ActionFn(251); + let __sym0 = __pop_Variant11(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action251::<>(__sym0); + __symbols.push((__start, __Symbol::Variant22(__nt), __end)); + (1, 27) + } + fn __reduce60< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ComparisonExpr = FieldAccessExpr, "is", FieldAccessExpr => ActionFn(76); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant13(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant13(__symbols); + let __start = __sym0.0; + let __end = __sym2.2; + let __nt = super::__action76::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant13(__nt), __end)); + (3, 28) + } + fn __reduce61< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ComparisonExpr = FieldAccessExpr, InequalityOp, FieldAccessExpr => ActionFn(77); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant13(__symbols); + let __sym1 = __pop_Variant29(__symbols); + let __sym0 = __pop_Variant13(__symbols); + let __start = __sym0.0; + let __end = __sym2.2; + let __nt = super::__action77::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant13(__nt), __end)); + (3, 28) + } + fn __reduce62< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut 
alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ComparisonExpr = FieldAccessExpr => ActionFn(78); + let __sym0 = __pop_Variant13(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action78::<>(__sym0); + __symbols.push((__start, __Symbol::Variant13(__nt), __end)); + (1, 28) + } + fn __reduce63< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Declaration = UseDecl => ActionFn(2); + let __sym0 = __pop_Variant49(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action2::<>(__sym0); + __symbols.push((__start, __Symbol::Variant23(__nt), __end)); + (1, 29) + } + fn __reduce64< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Declaration = Character => ActionFn(3); + let __sym0 = __pop_Variant19(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action3::<>(__sym0); + __symbols.push((__start, __Symbol::Variant23(__nt), __end)); + (1, 29) + } + fn __reduce65< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Declaration = Template => ActionFn(4); + let __sym0 = __pop_Variant44(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action4::<>(__sym0); + __symbols.push((__start, __Symbol::Variant23(__nt), __end)); + (1, 29) + } + fn __reduce66< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Declaration = LifeArc => ActionFn(5); + let __sym0 = __pop_Variant31(__symbols); + let __start = __sym0.0; + let __end = 
__sym0.2; + let __nt = super::__action5::<>(__sym0); + __symbols.push((__start, __Symbol::Variant23(__nt), __end)); + (1, 29) + } + fn __reduce67< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Declaration = Schedule => ActionFn(6); + let __sym0 = __pop_Variant40(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action6::<>(__sym0); + __symbols.push((__start, __Symbol::Variant23(__nt), __end)); + (1, 29) + } + fn __reduce68< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Declaration = Behavior => ActionFn(7); + let __sym0 = __pop_Variant16(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action7::<>(__sym0); + __symbols.push((__start, __Symbol::Variant23(__nt), __end)); + (1, 29) + } + fn __reduce69< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Declaration = Institution => ActionFn(8); + let __sym0 = __pop_Variant30(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action8::<>(__sym0); + __symbols.push((__start, __Symbol::Variant23(__nt), __end)); + (1, 29) + } + fn __reduce70< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Declaration = Relationship => ActionFn(9); + let __sym0 = __pop_Variant39(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action9::<>(__sym0); + __symbols.push((__start, __Symbol::Variant23(__nt), __end)); + (1, 29) + } + fn __reduce71< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut 
alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Declaration = Location => ActionFn(10); + let __sym0 = __pop_Variant32(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action10::<>(__sym0); + __symbols.push((__start, __Symbol::Variant23(__nt), __end)); + (1, 29) + } + fn __reduce72< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Declaration = Species => ActionFn(11); + let __sym0 = __pop_Variant43(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action11::<>(__sym0); + __symbols.push((__start, __Symbol::Variant23(__nt), __end)); + (1, 29) + } + fn __reduce73< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Declaration = EnumDecl => ActionFn(12); + let __sym0 = __pop_Variant26(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action12::<>(__sym0); + __symbols.push((__start, __Symbol::Variant23(__nt), __end)); + (1, 29) + } + fn __reduce74< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Declaration* = => ActionFn(126); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action126::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant24(__nt), __end)); + (0, 30) + } + fn __reduce75< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Declaration* = Declaration+ => ActionFn(127); + let __sym0 = 
__pop_Variant24(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action127::<>(__sym0); + __symbols.push((__start, __Symbol::Variant24(__nt), __end)); + (1, 30) + } + fn __reduce76< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Declaration+ = Declaration => ActionFn(128); + let __sym0 = __pop_Variant23(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action128::<>(__sym0); + __symbols.push((__start, __Symbol::Variant24(__nt), __end)); + (1, 31) + } + fn __reduce77< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Declaration+ = Declaration+, Declaration => ActionFn(129); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant23(__symbols); + let __sym0 = __pop_Variant24(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action129::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant24(__nt), __end)); + (2, 31) + } + fn __reduce78< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Duration = DurationLit => ActionFn(40); + let __sym0 = __pop_Variant1(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action40::<>(__sym0); + __symbols.push((__start, __Symbol::Variant25(__nt), __end)); + (1, 32) + } + fn __reduce79< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // EnumDecl = "enum", Ident, "{", Comma, "}" => ActionFn(68); + assert!(__symbols.len() >= 5); + let __sym4 = __pop_Variant0(__symbols); + let __sym3 = __pop_Variant21(__symbols); + 
let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym4.2; + let __nt = super::__action68::<>(__sym0, __sym1, __sym2, __sym3, __sym4); + __symbols.push((__start, __Symbol::Variant26(__nt), __end)); + (5, 33) + } + fn __reduce80< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Expr = OrExpr => ActionFn(69); + let __sym0 = __pop_Variant13(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action69::<>(__sym0); + __symbols.push((__start, __Symbol::Variant13(__nt), __end)); + (1, 34) + } + fn __reduce81< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Field = Ident, ":", Value => ActionFn(23); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant10(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant1(__symbols); + let __start = __sym0.0; + let __end = __sym2.2; + let __nt = super::__action23::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant8(__nt), __end)); + (3, 35) + } + fn __reduce82< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Field* = => ActionFn(121); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action121::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant9(__nt), __end)); + (0, 36) + } + fn __reduce83< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // 
Field* = Field+ => ActionFn(122); + let __sym0 = __pop_Variant9(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action122::<>(__sym0); + __symbols.push((__start, __Symbol::Variant9(__nt), __end)); + (1, 36) + } + fn __reduce84< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Field+ = Field => ActionFn(135); + let __sym0 = __pop_Variant8(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action135::<>(__sym0); + __symbols.push((__start, __Symbol::Variant9(__nt), __end)); + (1, 37) + } + fn __reduce85< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Field+ = Field+, Field => ActionFn(136); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant8(__symbols); + let __sym0 = __pop_Variant9(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action136::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant9(__nt), __end)); + (2, 37) + } + fn __reduce86< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Field? = Field => ActionFn(154); + let __sym0 = __pop_Variant8(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action154::<>(__sym0); + __symbols.push((__start, __Symbol::Variant27(__nt), __end)); + (1, 38) + } + fn __reduce87< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Field? 
= => ActionFn(155); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action155::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant27(__nt), __end)); + (0, 38) + } + fn __reduce88< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // FieldAccessExpr = FieldAccessExpr, ".", Ident => ActionFn(79); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant1(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant13(__symbols); + let __start = __sym0.0; + let __end = __sym2.2; + let __nt = super::__action79::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant13(__nt), __end)); + (3, 39) + } + fn __reduce89< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // FieldAccessExpr = PrimaryExpr => ActionFn(80); + let __sym0 = __pop_Variant13(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action80::<>(__sym0); + __symbols.push((__start, __Symbol::Variant13(__nt), __end)); + (1, 39) + } + fn __reduce90< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // File = => ActionFn(188); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action188::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant28(__nt), __end)); + (0, 40) + } + fn __reduce91< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // File = 
Declaration+ => ActionFn(189); + let __sym0 = __pop_Variant24(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action189::<>(__sym0); + __symbols.push((__start, __Symbol::Variant28(__nt), __end)); + (1, 40) + } + fn __reduce92< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Ident? = Ident => ActionFn(130); + let __sym0 = __pop_Variant1(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action130::<>(__sym0); + __symbols.push((__start, __Symbol::Variant7(__nt), __end)); + (1, 41) + } + fn __reduce93< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Ident? = => ActionFn(131); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action131::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant7(__nt), __end)); + (0, 41) + } + fn __reduce94< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Include = "include", Ident => ActionFn(22); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action22::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant1(__nt), __end)); + (2, 42) + } + fn __reduce95< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Include* = => ActionFn(114); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| 
s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action114::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant6(__nt), __end)); + (0, 43) + } + fn __reduce96< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Include* = Include+ => ActionFn(115); + let __sym0 = __pop_Variant6(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action115::<>(__sym0); + __symbols.push((__start, __Symbol::Variant6(__nt), __end)); + (1, 43) + } + fn __reduce97< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Include+ = Include => ActionFn(139); + let __sym0 = __pop_Variant1(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action139::<>(__sym0); + __symbols.push((__start, __Symbol::Variant6(__nt), __end)); + (1, 44) + } + fn __reduce98< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Include+ = Include+, Include => ActionFn(140); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant6(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action140::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant6(__nt), __end)); + (2, 44) + } + fn __reduce99< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // InequalityOp = ">" => ActionFn(85); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action85::<>(__sym0); + __symbols.push((__start, __Symbol::Variant29(__nt), 
__end)); + (1, 45) + } + fn __reduce100< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // InequalityOp = ">=" => ActionFn(86); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action86::<>(__sym0); + __symbols.push((__start, __Symbol::Variant29(__nt), __end)); + (1, 45) + } + fn __reduce101< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // InequalityOp = "<" => ActionFn(87); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action87::<>(__sym0); + __symbols.push((__start, __Symbol::Variant29(__nt), __end)); + (1, 45) + } + fn __reduce102< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // InequalityOp = "<=" => ActionFn(88); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action88::<>(__sym0); + __symbols.push((__start, __Symbol::Variant29(__nt), __end)); + (1, 45) + } + fn __reduce103< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Institution = "institution", Ident, "{", "}" => ActionFn(192); + assert!(__symbols.len() >= 4); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym3.2; + let __nt = super::__action192::<>(__sym0, __sym1, __sym2, __sym3); + __symbols.push((__start, __Symbol::Variant30(__nt), __end)); + (4, 46) + } + fn 
__reduce104< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Institution = "institution", Ident, "{", Field+, "}" => ActionFn(193); + assert!(__symbols.len() >= 5); + let __sym4 = __pop_Variant0(__symbols); + let __sym3 = __pop_Variant9(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym4.2; + let __nt = super::__action193::<>(__sym0, __sym1, __sym2, __sym3, __sym4); + __symbols.push((__start, __Symbol::Variant30(__nt), __end)); + (5, 46) + } + fn __reduce105< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // LifeArc = "life_arc", Ident, "{", "}" => ActionFn(186); + assert!(__symbols.len() >= 4); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym3.2; + let __nt = super::__action186::<>(__sym0, __sym1, __sym2, __sym3); + __symbols.push((__start, __Symbol::Variant31(__nt), __end)); + (4, 47) + } + fn __reduce106< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // LifeArc = "life_arc", Ident, "{", ArcState+, "}" => ActionFn(187); + assert!(__symbols.len() >= 5); + let __sym4 = __pop_Variant0(__symbols); + let __sym3 = __pop_Variant15(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym4.2; + let __nt = super::__action187::<>(__sym0, __sym1, __sym2, __sym3, __sym4); + __symbols.push((__start, 
__Symbol::Variant31(__nt), __end)); + (5, 47) + } + fn __reduce107< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Literal = IntLit => ActionFn(89); + let __sym0 = __pop_Variant2(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action89::<>(__sym0); + __symbols.push((__start, __Symbol::Variant13(__nt), __end)); + (1, 48) + } + fn __reduce108< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Literal = FloatLit => ActionFn(90); + let __sym0 = __pop_Variant3(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action90::<>(__sym0); + __symbols.push((__start, __Symbol::Variant13(__nt), __end)); + (1, 48) + } + fn __reduce109< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Literal = StringLit => ActionFn(91); + let __sym0 = __pop_Variant1(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action91::<>(__sym0); + __symbols.push((__start, __Symbol::Variant13(__nt), __end)); + (1, 48) + } + fn __reduce110< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Literal = BoolLit => ActionFn(92); + let __sym0 = __pop_Variant18(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action92::<>(__sym0); + __symbols.push((__start, __Symbol::Variant13(__nt), __end)); + (1, 48) + } + fn __reduce111< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Location = 
"location", Ident, "{", "}" => ActionFn(194); + assert!(__symbols.len() >= 4); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym3.2; + let __nt = super::__action194::<>(__sym0, __sym1, __sym2, __sym3); + __symbols.push((__start, __Symbol::Variant32(__nt), __end)); + (4, 49) + } + fn __reduce112< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Location = "location", Ident, "{", Field+, "}" => ActionFn(195); + assert!(__symbols.len() >= 5); + let __sym4 = __pop_Variant0(__symbols); + let __sym3 = __pop_Variant9(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym4.2; + let __nt = super::__action195::<>(__sym0, __sym1, __sym2, __sym3, __sym4); + __symbols.push((__start, __Symbol::Variant32(__nt), __end)); + (5, 49) + } + fn __reduce113< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // NotExpr = "not", NotExpr => ActionFn(74); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant13(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action74::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant13(__nt), __end)); + (2, 50) + } + fn __reduce114< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // NotExpr = ComparisonExpr => ActionFn(75); + let __sym0 = __pop_Variant13(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let 
__nt = super::__action75::<>(__sym0); + __symbols.push((__start, __Symbol::Variant13(__nt), __end)); + (1, 50) + } + fn __reduce115< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // OrExpr = OrExpr, "or", AndExpr => ActionFn(70); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant13(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant13(__symbols); + let __start = __sym0.0; + let __end = __sym2.2; + let __nt = super::__action70::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant13(__nt), __end)); + (3, 51) + } + fn __reduce116< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // OrExpr = AndExpr => ActionFn(71); + let __sym0 = __pop_Variant13(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action71::<>(__sym0); + __symbols.push((__start, __Symbol::Variant13(__nt), __end)); + (1, 51) + } + fn __reduce117< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // OtherBlock = "other", "{", "}" => ActionFn(196); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym2.2; + let __nt = super::__action196::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant20(__nt), __end)); + (3, 52) + } + fn __reduce118< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // OtherBlock = "other", "{", Field+, "}" => ActionFn(197); + assert!(__symbols.len() >= 4); + 
let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant9(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym3.2; + let __nt = super::__action197::<>(__sym0, __sym1, __sym2, __sym3); + __symbols.push((__start, __Symbol::Variant20(__nt), __end)); + (4, 52) + } + fn __reduce119< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // OtherBlock? = OtherBlock => ActionFn(93); + let __sym0 = __pop_Variant20(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action93::<>(__sym0); + __symbols.push((__start, __Symbol::Variant33(__nt), __end)); + (1, 53) + } + fn __reduce120< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // OtherBlock? 
= => ActionFn(94); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action94::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant33(__nt), __end)); + (0, 53) + } + fn __reduce121< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Override = "@", Path, "{", "}" => ActionFn(230); + assert!(__symbols.len() >= 4); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant21(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym3.2; + let __nt = super::__action230::<>(__sym0, __sym1, __sym2, __sym3); + __symbols.push((__start, __Symbol::Variant34(__nt), __end)); + (4, 54) + } + fn __reduce122< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Override = "@", Path, "{", OverrideOp+, "}" => ActionFn(231); + assert!(__symbols.len() >= 5); + let __sym4 = __pop_Variant0(__symbols); + let __sym3 = __pop_Variant36(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant21(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym4.2; + let __nt = super::__action231::<>(__sym0, __sym1, __sym2, __sym3, __sym4); + __symbols.push((__start, __Symbol::Variant34(__nt), __end)); + (5, 54) + } + fn __reduce123< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // OverrideOp = "remove", Ident => ActionFn(43); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let 
__end = __sym1.2; + let __nt = super::__action43::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant35(__nt), __end)); + (2, 55) + } + fn __reduce124< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // OverrideOp = "append", Field => ActionFn(44); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant8(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action44::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant35(__nt), __end)); + (2, 55) + } + fn __reduce125< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // OverrideOp = Field => ActionFn(45); + let __sym0 = __pop_Variant8(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action45::<>(__sym0); + __symbols.push((__start, __Symbol::Variant35(__nt), __end)); + (1, 55) + } + fn __reduce126< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // OverrideOp* = => ActionFn(111); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action111::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant36(__nt), __end)); + (0, 56) + } + fn __reduce127< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // OverrideOp* = OverrideOp+ => ActionFn(112); + let __sym0 = __pop_Variant36(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action112::<>(__sym0); + __symbols.push((__start, 
__Symbol::Variant36(__nt), __end)); + (1, 56) + } + fn __reduce128< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // OverrideOp+ = OverrideOp => ActionFn(146); + let __sym0 = __pop_Variant35(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action146::<>(__sym0); + __symbols.push((__start, __Symbol::Variant36(__nt), __end)); + (1, 57) + } + fn __reduce129< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // OverrideOp+ = OverrideOp+, OverrideOp => ActionFn(147); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant35(__symbols); + let __sym0 = __pop_Variant36(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action147::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant36(__nt), __end)); + (2, 57) + } + fn __reduce130< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Participant = Path, "as", Ident, SelfBlock, OtherBlock => ActionFn(234); + assert!(__symbols.len() >= 5); + let __sym4 = __pop_Variant20(__symbols); + let __sym3 = __pop_Variant20(__symbols); + let __sym2 = __pop_Variant1(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant21(__symbols); + let __start = __sym0.0; + let __end = __sym4.2; + let __nt = super::__action234::<>(__sym0, __sym1, __sym2, __sym3, __sym4); + __symbols.push((__start, __Symbol::Variant37(__nt), __end)); + (5, 58) + } + fn __reduce131< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Participant = Path, "as", Ident, OtherBlock => ActionFn(235); 
+ assert!(__symbols.len() >= 4); + let __sym3 = __pop_Variant20(__symbols); + let __sym2 = __pop_Variant1(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant21(__symbols); + let __start = __sym0.0; + let __end = __sym3.2; + let __nt = super::__action235::<>(__sym0, __sym1, __sym2, __sym3); + __symbols.push((__start, __Symbol::Variant37(__nt), __end)); + (4, 58) + } + fn __reduce132< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Participant = Path, "as", Ident, SelfBlock => ActionFn(236); + assert!(__symbols.len() >= 4); + let __sym3 = __pop_Variant20(__symbols); + let __sym2 = __pop_Variant1(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant21(__symbols); + let __start = __sym0.0; + let __end = __sym3.2; + let __nt = super::__action236::<>(__sym0, __sym1, __sym2, __sym3); + __symbols.push((__start, __Symbol::Variant37(__nt), __end)); + (4, 58) + } + fn __reduce133< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Participant = Path, "as", Ident => ActionFn(237); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant1(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant21(__symbols); + let __start = __sym0.0; + let __end = __sym2.2; + let __nt = super::__action237::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant37(__nt), __end)); + (3, 58) + } + fn __reduce134< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Participant = Path, SelfBlock, OtherBlock => ActionFn(238); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant20(__symbols); + let __sym1 = __pop_Variant20(__symbols); 
+ let __sym0 = __pop_Variant21(__symbols); + let __start = __sym0.0; + let __end = __sym2.2; + let __nt = super::__action238::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant37(__nt), __end)); + (3, 58) + } + fn __reduce135< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Participant = Path, OtherBlock => ActionFn(239); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant20(__symbols); + let __sym0 = __pop_Variant21(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action239::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant37(__nt), __end)); + (2, 58) + } + fn __reduce136< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Participant = Path, SelfBlock => ActionFn(240); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant20(__symbols); + let __sym0 = __pop_Variant21(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action240::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant37(__nt), __end)); + (2, 58) + } + fn __reduce137< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Participant = Path => ActionFn(241); + let __sym0 = __pop_Variant21(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action241::<>(__sym0); + __symbols.push((__start, __Symbol::Variant37(__nt), __end)); + (1, 58) + } + fn __reduce138< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Participant+ = Participant => ActionFn(100); + let __sym0 = 
__pop_Variant37(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action100::<>(__sym0); + __symbols.push((__start, __Symbol::Variant38(__nt), __end)); + (1, 59) + } + fn __reduce139< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Participant+ = Participant+, Participant => ActionFn(101); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant37(__symbols); + let __sym0 = __pop_Variant38(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action101::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant38(__nt), __end)); + (2, 59) + } + fn __reduce140< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Path = PathSegments => ActionFn(16); + let __sym0 = __pop_Variant21(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action16::<>(__sym0); + __symbols.push((__start, __Symbol::Variant21(__nt), __end)); + (1, 60) + } + fn __reduce141< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // PathSegments = Ident => ActionFn(17); + let __sym0 = __pop_Variant1(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action17::<>(__sym0); + __symbols.push((__start, __Symbol::Variant21(__nt), __end)); + (1, 61) + } + fn __reduce142< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // PathSegments = PathSegments, "::", Ident => ActionFn(18); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant1(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 
= __pop_Variant21(__symbols); + let __start = __sym0.0; + let __end = __sym2.2; + let __nt = super::__action18::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant21(__nt), __end)); + (3, 61) + } + fn __reduce143< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // PrimaryExpr = "self" => ActionFn(81); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action81::<>(__sym0); + __symbols.push((__start, __Symbol::Variant13(__nt), __end)); + (1, 62) + } + fn __reduce144< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // PrimaryExpr = "other" => ActionFn(82); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action82::<>(__sym0); + __symbols.push((__start, __Symbol::Variant13(__nt), __end)); + (1, 62) + } + fn __reduce145< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // PrimaryExpr = Literal => ActionFn(83); + let __sym0 = __pop_Variant13(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action83::<>(__sym0); + __symbols.push((__start, __Symbol::Variant13(__nt), __end)); + (1, 62) + } + fn __reduce146< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // PrimaryExpr = Path => ActionFn(84); + let __sym0 = __pop_Variant21(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action84::<>(__sym0); + __symbols.push((__start, __Symbol::Variant13(__nt), __end)); + (1, 62) + } + fn __reduce147< + >( + 
__lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ProseBlock = ProseBlockToken => ActionFn(41); + let __sym0 = __pop_Variant4(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action41::<>(__sym0); + __symbols.push((__start, __Symbol::Variant4(__nt), __end)); + (1, 63) + } + fn __reduce148< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Relationship = "relationship", Ident, "{", Participant+, "}" => ActionFn(198); + assert!(__symbols.len() >= 5); + let __sym4 = __pop_Variant0(__symbols); + let __sym3 = __pop_Variant38(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym4.2; + let __nt = super::__action198::<>(__sym0, __sym1, __sym2, __sym3, __sym4); + __symbols.push((__start, __Symbol::Variant39(__nt), __end)); + (5, 64) + } + fn __reduce149< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Relationship = "relationship", Ident, "{", Participant+, Field+, "}" => ActionFn(199); + assert!(__symbols.len() >= 6); + let __sym5 = __pop_Variant0(__symbols); + let __sym4 = __pop_Variant9(__symbols); + let __sym3 = __pop_Variant38(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym5.2; + let __nt = super::__action199::<>(__sym0, __sym1, __sym2, __sym3, __sym4, __sym5); + __symbols.push((__start, __Symbol::Variant39(__nt), __end)); + (6, 64) + } + fn __reduce150< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut 
alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Schedule = "schedule", Ident, "{", "}" => ActionFn(232); + assert!(__symbols.len() >= 4); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym3.2; + let __nt = super::__action232::<>(__sym0, __sym1, __sym2, __sym3); + __symbols.push((__start, __Symbol::Variant40(__nt), __end)); + (4, 65) + } + fn __reduce151< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Schedule = "schedule", Ident, "{", ScheduleBlock+, "}" => ActionFn(233); + assert!(__symbols.len() >= 5); + let __sym4 = __pop_Variant0(__symbols); + let __sym3 = __pop_Variant42(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym4.2; + let __nt = super::__action233::<>(__sym0, __sym1, __sym2, __sym3, __sym4); + __symbols.push((__start, __Symbol::Variant40(__nt), __end)); + (5, 65) + } + fn __reduce152< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ScheduleBlock = Time, "->", Time, ":", Ident => ActionFn(50); + assert!(__symbols.len() >= 5); + let __sym4 = __pop_Variant1(__symbols); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant46(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant46(__symbols); + let __start = __sym0.0; + let __end = __sym4.2; + let __nt = super::__action50::<>(__sym0, __sym1, __sym2, __sym3, __sym4); + __symbols.push((__start, __Symbol::Variant41(__nt), __end)); + (5, 66) + } + fn __reduce153< + >( + 
__lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ScheduleBlock* = => ActionFn(105); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action105::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant42(__nt), __end)); + (0, 67) + } + fn __reduce154< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ScheduleBlock* = ScheduleBlock+ => ActionFn(106); + let __sym0 = __pop_Variant42(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action106::<>(__sym0); + __symbols.push((__start, __Symbol::Variant42(__nt), __end)); + (1, 67) + } + fn __reduce155< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ScheduleBlock+ = ScheduleBlock => ActionFn(152); + let __sym0 = __pop_Variant41(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action152::<>(__sym0); + __symbols.push((__start, __Symbol::Variant42(__nt), __end)); + (1, 68) + } + fn __reduce156< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // ScheduleBlock+ = ScheduleBlock+, ScheduleBlock => ActionFn(153); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant41(__symbols); + let __sym0 = __pop_Variant42(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action153::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant42(__nt), __end)); + (2, 68) + } + fn __reduce157< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut 
alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // SelectorNode = "?", "{", BehaviorNode+, "}" => ActionFn(56); + assert!(__symbols.len() >= 4); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant17(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym3.2; + let __nt = super::__action56::<>(__sym0, __sym1, __sym2, __sym3); + __symbols.push((__start, __Symbol::Variant12(__nt), __end)); + (4, 69) + } + fn __reduce158< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // SelfBlock = "self", "{", "}" => ActionFn(200); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym2.2; + let __nt = super::__action200::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant20(__nt), __end)); + (3, 70) + } + fn __reduce159< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // SelfBlock = "self", "{", Field+, "}" => ActionFn(201); + assert!(__symbols.len() >= 4); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant9(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym3.2; + let __nt = super::__action201::<>(__sym0, __sym1, __sym2, __sym3); + __symbols.push((__start, __Symbol::Variant20(__nt), __end)); + (4, 70) + } + fn __reduce160< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // SelfBlock? 
= SelfBlock => ActionFn(95); + let __sym0 = __pop_Variant20(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action95::<>(__sym0); + __symbols.push((__start, __Symbol::Variant33(__nt), __end)); + (1, 71) + } + fn __reduce161< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // SelfBlock? = => ActionFn(96); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action96::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant33(__nt), __end)); + (0, 71) + } + fn __reduce162< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // SequenceNode = ">", "{", BehaviorNode+, "}" => ActionFn(57); + assert!(__symbols.len() >= 4); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant17(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym3.2; + let __nt = super::__action57::<>(__sym0, __sym1, __sym2, __sym3); + __symbols.push((__start, __Symbol::Variant12(__nt), __end)); + (4, 72) + } + fn __reduce163< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Species = "species", Ident, "{", "}" => ActionFn(202); + assert!(__symbols.len() >= 4); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym3.2; + let __nt = super::__action202::<>(__sym0, __sym1, __sym2, __sym3); + __symbols.push((__start, __Symbol::Variant43(__nt), __end)); + (4, 
73) + } + fn __reduce164< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Species = "species", Ident, "{", Field+, "}" => ActionFn(203); + assert!(__symbols.len() >= 5); + let __sym4 = __pop_Variant0(__symbols); + let __sym3 = __pop_Variant9(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym4.2; + let __nt = super::__action203::<>(__sym0, __sym1, __sym2, __sym3, __sym4); + __symbols.push((__start, __Symbol::Variant43(__nt), __end)); + (5, 73) + } + fn __reduce165< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // SubTreeNode = "@", Path => ActionFn(60); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant21(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action60::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant12(__nt), __end)); + (2, 74) + } + fn __reduce166< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Template = "template", Ident, "strict", "{", "}" => ActionFn(218); + assert!(__symbols.len() >= 5); + let __sym4 = __pop_Variant0(__symbols); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym4.2; + let __nt = super::__action218::<>(__sym0, __sym1, __sym2, __sym3, __sym4); + __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + (5, 75) + } + fn __reduce167< + >( + __lookahead_start: Option<&usize>, + 
__symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Template = "template", Ident, "strict", "{", Include+, "}" => ActionFn(219); + assert!(__symbols.len() >= 6); + let __sym5 = __pop_Variant0(__symbols); + let __sym4 = __pop_Variant6(__symbols); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym5.2; + let __nt = super::__action219::<>(__sym0, __sym1, __sym2, __sym3, __sym4, __sym5); + __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + (6, 75) + } + fn __reduce168< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Template = "template", Ident, "strict", "{", Field+, "}" => ActionFn(220); + assert!(__symbols.len() >= 6); + let __sym5 = __pop_Variant0(__symbols); + let __sym4 = __pop_Variant9(__symbols); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym5.2; + let __nt = super::__action220::<>(__sym0, __sym1, __sym2, __sym3, __sym4, __sym5); + __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + (6, 75) + } + fn __reduce169< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Template = "template", Ident, "strict", "{", Include+, Field+, "}" => ActionFn(221); + assert!(__symbols.len() >= 7); + let __sym6 = __pop_Variant0(__symbols); + let __sym5 = __pop_Variant9(__symbols); + let __sym4 = __pop_Variant6(__symbols); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = 
__pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym6.2; + let __nt = super::__action221::<>(__sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6); + __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + (7, 75) + } + fn __reduce170< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Template = "template", Ident, "{", "}" => ActionFn(222); + assert!(__symbols.len() >= 4); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym3.2; + let __nt = super::__action222::<>(__sym0, __sym1, __sym2, __sym3); + __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + (4, 75) + } + fn __reduce171< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Template = "template", Ident, "{", Include+, "}" => ActionFn(223); + assert!(__symbols.len() >= 5); + let __sym4 = __pop_Variant0(__symbols); + let __sym3 = __pop_Variant6(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym4.2; + let __nt = super::__action223::<>(__sym0, __sym1, __sym2, __sym3, __sym4); + __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + (5, 75) + } + fn __reduce172< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Template = "template", Ident, "{", Field+, "}" => ActionFn(224); + assert!(__symbols.len() >= 5); + let __sym4 = __pop_Variant0(__symbols); + let __sym3 = 
__pop_Variant9(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym4.2; + let __nt = super::__action224::<>(__sym0, __sym1, __sym2, __sym3, __sym4); + __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + (5, 75) + } + fn __reduce173< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Template = "template", Ident, "{", Include+, Field+, "}" => ActionFn(225); + assert!(__symbols.len() >= 6); + let __sym5 = __pop_Variant0(__symbols); + let __sym4 = __pop_Variant9(__symbols); + let __sym3 = __pop_Variant6(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym5.2; + let __nt = super::__action225::<>(__sym0, __sym1, __sym2, __sym3, __sym4, __sym5); + __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + (6, 75) + } + fn __reduce174< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // TemplateClause = "from", Ident => ActionFn(169); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action169::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant21(__nt), __end)); + (2, 76) + } + fn __reduce175< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // TemplateClause = "from", Ident, ("," )+ => ActionFn(170); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant6(__symbols); + let __sym1 = 
__pop_Variant1(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym2.2; + let __nt = super::__action170::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant21(__nt), __end)); + (3, 76) + } + fn __reduce176< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // TemplateClause? = TemplateClause => ActionFn(123); + let __sym0 = __pop_Variant21(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action123::<>(__sym0); + __symbols.push((__start, __Symbol::Variant45(__nt), __end)); + (1, 77) + } + fn __reduce177< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // TemplateClause? = => ActionFn(124); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action124::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant45(__nt), __end)); + (0, 77) + } + fn __reduce178< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Time = TimeLit => ActionFn(39); + let __sym0 = __pop_Variant1(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action39::<>(__sym0); + __symbols.push((__start, __Symbol::Variant46(__nt), __end)); + (1, 78) + } + fn __reduce179< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Transition = "on", Expr, "->", Ident => ActionFn(48); + assert!(__symbols.len() >= 4); + let __sym3 = __pop_Variant1(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let 
__sym1 = __pop_Variant13(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym3.2; + let __nt = super::__action48::<>(__sym0, __sym1, __sym2, __sym3); + __symbols.push((__start, __Symbol::Variant47(__nt), __end)); + (4, 79) + } + fn __reduce180< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Transition* = => ActionFn(107); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action107::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant48(__nt), __end)); + (0, 80) + } + fn __reduce181< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Transition* = Transition+ => ActionFn(108); + let __sym0 = __pop_Variant48(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action108::<>(__sym0); + __symbols.push((__start, __Symbol::Variant48(__nt), __end)); + (1, 80) + } + fn __reduce182< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Transition+ = Transition => ActionFn(150); + let __sym0 = __pop_Variant47(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action150::<>(__sym0); + __symbols.push((__start, __Symbol::Variant48(__nt), __end)); + (1, 81) + } + fn __reduce183< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Transition+ = Transition+, Transition => ActionFn(151); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant47(__symbols); + let __sym0 = 
__pop_Variant48(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action151::<>(__sym0, __sym1); + __symbols.push((__start, __Symbol::Variant48(__nt), __end)); + (2, 81) + } + fn __reduce184< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // UseDecl = "use", Path, ";" => ActionFn(13); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant21(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym2.2; + let __nt = super::__action13::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant49(__nt), __end)); + (3, 82) + } + fn __reduce185< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // UseDecl = "use", PathSegments, "::", "{", Comma, "}", ";" => ActionFn(14); + assert!(__symbols.len() >= 7); + let __sym6 = __pop_Variant0(__symbols); + let __sym5 = __pop_Variant0(__symbols); + let __sym4 = __pop_Variant21(__symbols); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant21(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym6.2; + let __nt = super::__action14::<>(__sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6); + __symbols.push((__start, __Symbol::Variant49(__nt), __end)); + (7, 82) + } + fn __reduce186< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // UseDecl = "use", PathSegments, "::", "*", ";" => ActionFn(15); + assert!(__symbols.len() >= 5); + let __sym4 = __pop_Variant0(__symbols); + let __sym3 = __pop_Variant0(__symbols); + let __sym2 = 
__pop_Variant0(__symbols); + let __sym1 = __pop_Variant21(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym4.2; + let __nt = super::__action15::<>(__sym0, __sym1, __sym2, __sym3, __sym4); + __symbols.push((__start, __Symbol::Variant49(__nt), __end)); + (5, 82) + } + fn __reduce187< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Value = IntLit => ActionFn(24); + let __sym0 = __pop_Variant2(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action24::<>(__sym0); + __symbols.push((__start, __Symbol::Variant10(__nt), __end)); + (1, 83) + } + fn __reduce188< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Value = FloatLit => ActionFn(25); + let __sym0 = __pop_Variant3(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action25::<>(__sym0); + __symbols.push((__start, __Symbol::Variant10(__nt), __end)); + (1, 83) + } + fn __reduce189< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Value = StringLit => ActionFn(26); + let __sym0 = __pop_Variant1(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action26::<>(__sym0); + __symbols.push((__start, __Symbol::Variant10(__nt), __end)); + (1, 83) + } + fn __reduce190< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Value = BoolLit => ActionFn(27); + let __sym0 = __pop_Variant18(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action27::<>(__sym0); + 
__symbols.push((__start, __Symbol::Variant10(__nt), __end)); + (1, 83) + } + fn __reduce191< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Value = IntLit, "..", IntLit => ActionFn(28); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant2(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant2(__symbols); + let __start = __sym0.0; + let __end = __sym2.2; + let __nt = super::__action28::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant10(__nt), __end)); + (3, 83) + } + fn __reduce192< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Value = FloatLit, "..", FloatLit => ActionFn(29); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant3(__symbols); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant3(__symbols); + let __start = __sym0.0; + let __end = __sym2.2; + let __nt = super::__action29::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant10(__nt), __end)); + (3, 83) + } + fn __reduce193< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Value = Time => ActionFn(30); + let __sym0 = __pop_Variant46(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action30::<>(__sym0); + __symbols.push((__start, __Symbol::Variant10(__nt), __end)); + (1, 83) + } + fn __reduce194< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Value = Duration => ActionFn(31); + let __sym0 = __pop_Variant25(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = 
super::__action31::<>(__sym0); + __symbols.push((__start, __Symbol::Variant10(__nt), __end)); + (1, 83) + } + fn __reduce195< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Value = Path => ActionFn(32); + let __sym0 = __pop_Variant21(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action32::<>(__sym0); + __symbols.push((__start, __Symbol::Variant10(__nt), __end)); + (1, 83) + } + fn __reduce196< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Value = ProseBlock => ActionFn(33); + let __sym0 = __pop_Variant4(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action33::<>(__sym0); + __symbols.push((__start, __Symbol::Variant10(__nt), __end)); + (1, 83) + } + fn __reduce197< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Value = "[", Comma, "]" => ActionFn(34); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant22(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym2.2; + let __nt = super::__action34::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant10(__nt), __end)); + (3, 83) + } + fn __reduce198< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Value = "{", "}" => ActionFn(208); + assert!(__symbols.len() >= 2); + let __sym1 = __pop_Variant0(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym1.2; + let __nt = super::__action208::<>(__sym0, __sym1); + 
__symbols.push((__start, __Symbol::Variant10(__nt), __end)); + (2, 83) + } + fn __reduce199< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Value = "{", Field+, "}" => ActionFn(209); + assert!(__symbols.len() >= 3); + let __sym2 = __pop_Variant0(__symbols); + let __sym1 = __pop_Variant9(__symbols); + let __sym0 = __pop_Variant0(__symbols); + let __start = __sym0.0; + let __end = __sym2.2; + let __nt = super::__action209::<>(__sym0, __sym1, __sym2); + __symbols.push((__start, __Symbol::Variant10(__nt), __end)); + (3, 83) + } + fn __reduce200< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Value = Override => ActionFn(36); + let __sym0 = __pop_Variant34(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action36::<>(__sym0); + __symbols.push((__start, __Symbol::Variant10(__nt), __end)); + (1, 83) + } + fn __reduce201< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Value? = Value => ActionFn(141); + let __sym0 = __pop_Variant10(__symbols); + let __start = __sym0.0; + let __end = __sym0.2; + let __nt = super::__action141::<>(__sym0); + __symbols.push((__start, __Symbol::Variant50(__nt), __end)); + (1, 84) + } + fn __reduce202< + >( + __lookahead_start: Option<&usize>, + __symbols: &mut alloc::vec::Vec<(usize,__Symbol<>,usize)>, + _: core::marker::PhantomData<()>, + ) -> (usize, usize) + { + // Value? 
= => ActionFn(142); + let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2)).unwrap_or_default(); + let __end = __start; + let __nt = super::__action142::<>(&__start, &__end); + __symbols.push((__start, __Symbol::Variant50(__nt), __end)); + (0, 84) + } +} +#[allow(unused_imports)] +pub use self::__parse__File::FileParser; + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action0((_, __0, _): (usize, File, usize)) -> File { + __0 +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action1((_, declarations, _): (usize, alloc::vec::Vec, usize)) -> File { + File { declarations } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action2((_, u, _): (usize, UseDecl, usize)) -> Declaration { + Declaration::Use(u) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action3((_, c, _): (usize, Character, usize)) -> Declaration { + Declaration::Character(c) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action4((_, t, _): (usize, Template, usize)) -> Declaration { + Declaration::Template(t) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action5((_, l, _): (usize, LifeArc, usize)) -> Declaration { + Declaration::LifeArc(l) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action6((_, s, _): (usize, Schedule, usize)) -> Declaration { + Declaration::Schedule(s) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action7((_, b, _): (usize, Behavior, usize)) -> Declaration { + 
Declaration::Behavior(b) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action8((_, i, _): (usize, Institution, usize)) -> Declaration { + Declaration::Institution(i) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action9((_, r, _): (usize, Relationship, usize)) -> Declaration { + Declaration::Relationship(r) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action10((_, loc, _): (usize, Location, usize)) -> Declaration { + Declaration::Location(loc) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action11((_, sp, _): (usize, Species, usize)) -> Declaration { + Declaration::Species(sp) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action12((_, e, _): (usize, EnumDecl, usize)) -> Declaration { + Declaration::Enum(e) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action13( + (_, _, _): (usize, Token, usize), + (_, path, _): (usize, Vec, usize), + (_, _, _): (usize, Token, usize), +) -> UseDecl { + UseDecl { + path, + kind: UseKind::Single, + span: Span::new(0, 0), // TODO: track actual spans + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action14( + (_, _, _): (usize, Token, usize), + (_, base, _): (usize, Vec, usize), + (_, _, _): (usize, Token, usize), + (_, _, _): (usize, Token, usize), + (_, items, _): (usize, Vec, usize), + (_, _, _): (usize, Token, usize), + (_, _, _): (usize, Token, usize), +) -> UseDecl { + UseDecl { + path: base, + kind: UseKind::Grouped(items), + span: Span::new(0, 0), + } +} + +#[allow( + clippy::too_many_arguments, 
+ clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action15( + (_, _, _): (usize, Token, usize), + (_, path, _): (usize, Vec, usize), + (_, _, _): (usize, Token, usize), + (_, _, _): (usize, Token, usize), + (_, _, _): (usize, Token, usize), +) -> UseDecl { + UseDecl { + path, + kind: UseKind::Wildcard, + span: Span::new(0, 0), + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action16((_, __0, _): (usize, Vec, usize)) -> Vec { + __0 +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action17((_, __0, _): (usize, String, usize)) -> Vec { + vec![__0] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action18( + (_, mut v, _): (usize, Vec, usize), + (_, _, _): (usize, Token, usize), + (_, i, _): (usize, String, usize), +) -> Vec { + { + v.push(i); + v + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action19( + (_, _, _): (usize, Token, usize), + (_, name, _): (usize, String, usize), + (_, template, _): (usize, Option>, usize), + (_, _, _): (usize, Token, usize), + (_, fields, _): (usize, alloc::vec::Vec, usize), + (_, _, _): (usize, Token, usize), +) -> Character { + Character { + name, + fields, + template, + span: Span::new(0, 0), + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action20( + (_, _, _): (usize, Token, usize), + (_, t, _): (usize, String, usize), + (_, rest, _): (usize, alloc::vec::Vec, usize), +) -> Vec { + { + let mut templates = vec![t]; + templates.extend(rest); + templates + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action21( + (_, _, _): (usize, Token, usize), + (_, 
name, _): (usize, String, usize), + (_, strict, _): (usize, Option, usize), + (_, _, _): (usize, Token, usize), + (_, includes, _): (usize, alloc::vec::Vec, usize), + (_, fields, _): (usize, alloc::vec::Vec, usize), + (_, _, _): (usize, Token, usize), +) -> Template { + Template { + name, + fields, + strict: strict.is_some(), + includes, + span: Span::new(0, 0), + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action22((_, _, _): (usize, Token, usize), (_, name, _): (usize, String, usize)) -> String { + name +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action23( + (_, name, _): (usize, String, usize), + (_, _, _): (usize, Token, usize), + (_, value, _): (usize, Value, usize), +) -> Field { + Field { + name, + value, + span: Span::new(0, 0), + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action24((_, __0, _): (usize, i64, usize)) -> Value { + Value::Int(__0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action25((_, __0, _): (usize, f64, usize)) -> Value { + Value::Float(__0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action26((_, __0, _): (usize, String, usize)) -> Value { + Value::String(__0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action27((_, __0, _): (usize, bool, usize)) -> Value { + Value::Bool(__0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action28( + (_, lo, _): (usize, i64, usize), + (_, _, _): (usize, Token, usize), + (_, hi, _): (usize, i64, usize), +) -> Value { + Value::Range(Box::new(Value::Int(lo)), 
Box::new(Value::Int(hi))) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action29( + (_, lo, _): (usize, f64, usize), + (_, _, _): (usize, Token, usize), + (_, hi, _): (usize, f64, usize), +) -> Value { + Value::Range(Box::new(Value::Float(lo)), Box::new(Value::Float(hi))) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action30((_, t, _): (usize, Time, usize)) -> Value { + Value::Time(t) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action31((_, d, _): (usize, Duration, usize)) -> Value { + Value::Duration(d) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action32((_, p, _): (usize, Vec, usize)) -> Value { + Value::Identifier(p) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action33((_, __0, _): (usize, ProseBlock, usize)) -> Value { + Value::ProseBlock(__0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action34( + (_, _, _): (usize, Token, usize), + (_, values, _): (usize, Vec, usize), + (_, _, _): (usize, Token, usize), +) -> Value { + Value::List(values) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action35( + (_, _, _): (usize, Token, usize), + (_, fields, _): (usize, alloc::vec::Vec, usize), + (_, _, _): (usize, Token, usize), +) -> Value { + Value::Object(fields) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action36((_, __0, _): (usize, Override, usize)) -> Value { + Value::Override(__0) +} + +#[allow( + clippy::too_many_arguments, + 
clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action37((_, __0, _): (usize, Token, usize)) -> bool { + true +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action38((_, __0, _): (usize, Token, usize)) -> bool { + false +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action39((_, s, _): (usize, String, usize)) -> Time { + { + let parts: Vec<&str> = s.split(':').collect(); + let hour = parts[0].parse().unwrap_or(0); + let minute = parts[1].parse().unwrap_or(0); + let second = if parts.len() > 2 { + parts[2].parse().unwrap_or(0) + } else { + 0 + }; + Time { + hour, + minute, + second, + } + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action40((_, s, _): (usize, String, usize)) -> Duration { + { + let mut hours = 0; + let mut minutes = 0; + let mut seconds = 0; + + let mut num = String::new(); + for ch in s.chars() { + if ch.is_ascii_digit() { + num.push(ch); + } else { + let val: u32 = num.parse().unwrap_or(0); + match ch { + | 'h' => hours = val, + | 'm' => minutes = val, + | 's' => seconds = val, + | _ => {}, + } + num.clear(); + } + } + + Duration { + hours, + minutes, + seconds, + } + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action41((_, __0, _): (usize, ProseBlock, usize)) -> ProseBlock { + __0 +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action42( + (_, _, _): (usize, Token, usize), + (_, base, _): (usize, Vec, usize), + (_, _, _): (usize, Token, usize), + (_, overrides, _): (usize, alloc::vec::Vec, usize), + (_, _, _): (usize, Token, usize), +) -> Override { + Override { + base, + overrides, + span: Span::new(0, 0), + } +} + +#[allow( + 
clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action43( + (_, _, _): (usize, Token, usize), + (_, name, _): (usize, String, usize), +) -> OverrideOp { + OverrideOp::Remove(name) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action44((_, _, _): (usize, Token, usize), (_, f, _): (usize, Field, usize)) -> OverrideOp { + OverrideOp::Append(f) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action45((_, f, _): (usize, Field, usize)) -> OverrideOp { + OverrideOp::Set(f) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action46( + (_, _, _): (usize, Token, usize), + (_, name, _): (usize, String, usize), + (_, _, _): (usize, Token, usize), + (_, states, _): (usize, alloc::vec::Vec, usize), + (_, _, _): (usize, Token, usize), +) -> LifeArc { + LifeArc { + name, + states, + span: Span::new(0, 0), + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action47( + (_, _, _): (usize, Token, usize), + (_, name, _): (usize, String, usize), + (_, _, _): (usize, Token, usize), + (_, transitions, _): (usize, alloc::vec::Vec, usize), + (_, _, _): (usize, Token, usize), +) -> ArcState { + ArcState { + name, + transitions, + span: Span::new(0, 0), + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action48( + (_, _, _): (usize, Token, usize), + (_, cond, _): (usize, Expr, usize), + (_, _, _): (usize, Token, usize), + (_, to, _): (usize, String, usize), +) -> Transition { + Transition { + to, + condition: cond, + span: Span::new(0, 0), + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn 
__action49( + (_, _, _): (usize, Token, usize), + (_, name, _): (usize, String, usize), + (_, _, _): (usize, Token, usize), + (_, blocks, _): (usize, alloc::vec::Vec, usize), + (_, _, _): (usize, Token, usize), +) -> Schedule { + Schedule { + name, + blocks, + span: Span::new(0, 0), + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action50( + (_, start, _): (usize, Time, usize), + (_, _, _): (usize, Token, usize), + (_, end, _): (usize, Time, usize), + (_, _, _): (usize, Token, usize), + (_, activity, _): (usize, String, usize), +) -> ScheduleBlock { + ScheduleBlock { + start, + end, + activity, + span: Span::new(0, 0), + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action51( + (_, _, _): (usize, Token, usize), + (_, name, _): (usize, String, usize), + (_, _, _): (usize, Token, usize), + (_, root, _): (usize, BehaviorNode, usize), + (_, _, _): (usize, Token, usize), +) -> Behavior { + Behavior { + name, + root, + span: Span::new(0, 0), + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action52((_, __0, _): (usize, BehaviorNode, usize)) -> BehaviorNode { + __0 +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action53((_, __0, _): (usize, BehaviorNode, usize)) -> BehaviorNode { + __0 +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action54((_, __0, _): (usize, BehaviorNode, usize)) -> BehaviorNode { + __0 +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action55((_, __0, _): (usize, BehaviorNode, usize)) -> BehaviorNode { + __0 +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + 
clippy::just_underscores_and_digits +)] +fn __action56( + (_, _, _): (usize, Token, usize), + (_, _, _): (usize, Token, usize), + (_, nodes, _): (usize, alloc::vec::Vec, usize), + (_, _, _): (usize, Token, usize), +) -> BehaviorNode { + BehaviorNode::Selector(nodes) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action57( + (_, _, _): (usize, Token, usize), + (_, _, _): (usize, Token, usize), + (_, nodes, _): (usize, alloc::vec::Vec, usize), + (_, _, _): (usize, Token, usize), +) -> BehaviorNode { + BehaviorNode::Sequence(nodes) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action58( + (_, name, _): (usize, String, usize), + (_, _, _): (usize, Token, usize), + (_, params, _): (usize, Vec, usize), + (_, _, _): (usize, Token, usize), +) -> BehaviorNode { + BehaviorNode::Action(name, params) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action59((_, name, _): (usize, String, usize)) -> BehaviorNode { + BehaviorNode::Action(name, vec![]) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action60( + (_, _, _): (usize, Token, usize), + (_, path, _): (usize, Vec, usize), +) -> BehaviorNode { + BehaviorNode::SubTree(path) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action61( + (_, _, _): (usize, Token, usize), + (_, name, _): (usize, String, usize), + (_, _, _): (usize, Token, usize), + (_, fields, _): (usize, alloc::vec::Vec, usize), + (_, _, _): (usize, Token, usize), +) -> Institution { + Institution { + name, + fields, + span: Span::new(0, 0), + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action62( + (_, _, _): 
(usize, Token, usize), + (_, name, _): (usize, String, usize), + (_, _, _): (usize, Token, usize), + (_, participants, _): (usize, alloc::vec::Vec, usize), + (_, fields, _): (usize, alloc::vec::Vec, usize), + (_, _, _): (usize, Token, usize), +) -> Relationship { + Relationship { + name, + participants, + fields, + span: Span::new(0, 0), + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action63( + (_, name, _): (usize, Vec, usize), + (_, role, _): (usize, Option, usize), + (_, self_block, _): (usize, Option>, usize), + (_, other_block, _): (usize, Option>, usize), +) -> Participant { + Participant { + role, + name, + self_block, + other_block, + span: Span::new(0, 0), + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action64( + (_, _, _): (usize, Token, usize), + (_, _, _): (usize, Token, usize), + (_, fields, _): (usize, alloc::vec::Vec, usize), + (_, _, _): (usize, Token, usize), +) -> Vec { + fields +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action65( + (_, _, _): (usize, Token, usize), + (_, _, _): (usize, Token, usize), + (_, fields, _): (usize, alloc::vec::Vec, usize), + (_, _, _): (usize, Token, usize), +) -> Vec { + fields +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action66( + (_, _, _): (usize, Token, usize), + (_, name, _): (usize, String, usize), + (_, _, _): (usize, Token, usize), + (_, fields, _): (usize, alloc::vec::Vec, usize), + (_, _, _): (usize, Token, usize), +) -> Location { + Location { + name, + fields, + span: Span::new(0, 0), + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action67( + (_, _, _): (usize, Token, usize), + (_, name, _): (usize, String, 
usize), + (_, _, _): (usize, Token, usize), + (_, fields, _): (usize, alloc::vec::Vec, usize), + (_, _, _): (usize, Token, usize), +) -> Species { + Species { + name, + fields, + span: Span::new(0, 0), + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action68( + (_, _, _): (usize, Token, usize), + (_, name, _): (usize, String, usize), + (_, _, _): (usize, Token, usize), + (_, variants, _): (usize, Vec, usize), + (_, _, _): (usize, Token, usize), +) -> EnumDecl { + EnumDecl { + name, + variants, + span: Span::new(0, 0), + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action69((_, __0, _): (usize, Expr, usize)) -> Expr { + __0 +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action70( + (_, left, _): (usize, Expr, usize), + (_, _, _): (usize, Token, usize), + (_, right, _): (usize, Expr, usize), +) -> Expr { + { + Expr::Logical(Box::new(left), LogicalOp::Or, Box::new(right)) + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action71((_, __0, _): (usize, Expr, usize)) -> Expr { + __0 +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action72( + (_, left, _): (usize, Expr, usize), + (_, _, _): (usize, Token, usize), + (_, right, _): (usize, Expr, usize), +) -> Expr { + { + Expr::Logical(Box::new(left), LogicalOp::And, Box::new(right)) + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action73((_, __0, _): (usize, Expr, usize)) -> Expr { + __0 +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action74((_, _, _): (usize, Token, usize), (_, expr, _): 
(usize, Expr, usize)) -> Expr { + { + Expr::Unary(UnaryOp::Not, Box::new(expr)) + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action75((_, __0, _): (usize, Expr, usize)) -> Expr { + __0 +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action76( + (_, left, _): (usize, Expr, usize), + (_, _, _): (usize, Token, usize), + (_, right, _): (usize, Expr, usize), +) -> Expr { + { + Expr::Comparison(Box::new(left), CompOp::Eq, Box::new(right)) + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action77( + (_, left, _): (usize, Expr, usize), + (_, op, _): (usize, CompOp, usize), + (_, right, _): (usize, Expr, usize), +) -> Expr { + { + Expr::Comparison(Box::new(left), op, Box::new(right)) + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action78((_, __0, _): (usize, Expr, usize)) -> Expr { + __0 +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action79( + (_, base, _): (usize, Expr, usize), + (_, _, _): (usize, Token, usize), + (_, field, _): (usize, String, usize), +) -> Expr { + { + Expr::FieldAccess(Box::new(base), field) + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action80((_, __0, _): (usize, Expr, usize)) -> Expr { + __0 +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action81((_, __0, _): (usize, Token, usize)) -> Expr { + Expr::Identifier(vec!["self".to_string()]) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action82((_, __0, _): (usize, Token, usize)) -> Expr 
{ + Expr::Identifier(vec!["other".to_string()]) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action83((_, __0, _): (usize, Expr, usize)) -> Expr { + __0 +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action84((_, __0, _): (usize, Vec, usize)) -> Expr { + Expr::Identifier(__0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action85((_, __0, _): (usize, Token, usize)) -> CompOp { + CompOp::Gt +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action86((_, __0, _): (usize, Token, usize)) -> CompOp { + CompOp::Ge +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action87((_, __0, _): (usize, Token, usize)) -> CompOp { + CompOp::Lt +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action88((_, __0, _): (usize, Token, usize)) -> CompOp { + CompOp::Le +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action89((_, __0, _): (usize, i64, usize)) -> Expr { + Expr::IntLit(__0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action90((_, __0, _): (usize, f64, usize)) -> Expr { + Expr::FloatLit(__0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action91((_, __0, _): (usize, String, usize)) -> Expr { + Expr::StringLit(__0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action92((_, __0, _): (usize, bool, usize)) -> Expr { + Expr::BoolLit(__0) +} 
+ +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action93((_, __0, _): (usize, Vec, usize)) -> Option> { + Some(__0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action94(__lookbehind: &usize, __lookahead: &usize) -> Option> { + None +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action95((_, __0, _): (usize, Vec, usize)) -> Option> { + Some(__0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action96(__lookbehind: &usize, __lookahead: &usize) -> Option> { + None +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action97((_, __0, _): (usize, String, usize)) -> Option { + Some(__0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action98(__lookbehind: &usize, __lookahead: &usize) -> Option { + None +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action99((_, _, _): (usize, Token, usize), (_, __0, _): (usize, String, usize)) -> String { + __0 +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action100((_, __0, _): (usize, Participant, usize)) -> alloc::vec::Vec { + alloc::vec![__0] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action101( + (_, v, _): (usize, alloc::vec::Vec, usize), + (_, e, _): (usize, Participant, usize), +) -> alloc::vec::Vec { + { + let mut v = v; + v.push(e); + v + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] 
+fn __action102( + (_, v, _): (usize, alloc::vec::Vec, usize), + (_, e, _): (usize, Option, usize), +) -> Vec { + match e { + | None => v, + | Some(e) => { + let mut v = v; + v.push(e); + v + }, + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action103((_, __0, _): (usize, BehaviorNode, usize)) -> alloc::vec::Vec { + alloc::vec![__0] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action104( + (_, v, _): (usize, alloc::vec::Vec, usize), + (_, e, _): (usize, BehaviorNode, usize), +) -> alloc::vec::Vec { + { + let mut v = v; + v.push(e); + v + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action105(__lookbehind: &usize, __lookahead: &usize) -> alloc::vec::Vec { + alloc::vec![] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action106( + (_, v, _): (usize, alloc::vec::Vec, usize), +) -> alloc::vec::Vec { + v +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action107(__lookbehind: &usize, __lookahead: &usize) -> alloc::vec::Vec { + alloc::vec![] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action108( + (_, v, _): (usize, alloc::vec::Vec, usize), +) -> alloc::vec::Vec { + v +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action109(__lookbehind: &usize, __lookahead: &usize) -> alloc::vec::Vec { + alloc::vec![] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action110((_, v, _): (usize, alloc::vec::Vec, usize)) -> alloc::vec::Vec { + v +} + +#[allow( + 
clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +/* NOTE(review): LALRPOP build output (parser reduce-action helpers). Do not hand-edit -- regenerate from the .lalrpop grammar. __action111..__action146 are adapter actions for EBNF sugar: empty/singleton Vec builders, Option wrappers, and separator-token stripping; each takes (start, value, end) span triples. */ fn __action111(__lookbehind: &usize, __lookahead: &usize) -> alloc::vec::Vec { + alloc::vec![] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action112( + (_, v, _): (usize, alloc::vec::Vec, usize), +) -> alloc::vec::Vec { + v +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +/* separated list: append the optional trailing element, if present */ fn __action113( + (_, v, _): (usize, alloc::vec::Vec, usize), + (_, e, _): (usize, Option, usize), +) -> Vec { + match e { + | None => v, + | Some(e) => { + let mut v = v; + v.push(e); + v + }, + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action114(__lookbehind: &usize, __lookahead: &usize) -> alloc::vec::Vec { + alloc::vec![] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action115((_, v, _): (usize, alloc::vec::Vec, usize)) -> alloc::vec::Vec { + v +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +/* Token? -> Some / None pair (__action116/__action117) */ fn __action116((_, __0, _): (usize, Token, usize)) -> Option { + Some(__0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action117(__lookbehind: &usize, __lookahead: &usize) -> Option { + None +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action118(__lookbehind: &usize, __lookahead: &usize) -> alloc::vec::Vec { + alloc::vec![] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action119((_, v, _): (usize, alloc::vec::Vec, usize)) -> alloc::vec::Vec { + v +} + +#[allow( + clippy::too_many_arguments, + 
clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +/* drops the leading Token (presumably a separator/keyword -- confirm against the grammar), keeps the String */ fn __action120((_, _, _): (usize, Token, usize), (_, __0, _): (usize, String, usize)) -> String { + __0 +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action121(__lookbehind: &usize, __lookahead: &usize) -> alloc::vec::Vec { + alloc::vec![] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action122((_, v, _): (usize, alloc::vec::Vec, usize)) -> alloc::vec::Vec { + v +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action123((_, __0, _): (usize, Vec, usize)) -> Option> { + Some(__0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action124(__lookbehind: &usize, __lookahead: &usize) -> Option> { + None +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action125( + (_, v, _): (usize, alloc::vec::Vec, usize), + (_, e, _): (usize, Option, usize), +) -> Vec { + match e { + | None => v, + | Some(e) => { + let mut v = v; + v.push(e); + v + }, + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action126(__lookbehind: &usize, __lookahead: &usize) -> alloc::vec::Vec { + alloc::vec![] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action127( + (_, v, _): (usize, alloc::vec::Vec, usize), +) -> alloc::vec::Vec { + v +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +/* Declaration+ : seed the singleton; __action129 appends subsequent elements */ fn __action128((_, __0, _): (usize, Declaration, usize)) -> alloc::vec::Vec { + alloc::vec![__0] +} + +#[allow( + clippy::too_many_arguments, + 
clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action129( + (_, v, _): (usize, alloc::vec::Vec, usize), + (_, e, _): (usize, Declaration, usize), +) -> alloc::vec::Vec { + { + let mut v = v; + v.push(e); + v + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action130((_, __0, _): (usize, String, usize)) -> Option { + Some(__0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action131(__lookbehind: &usize, __lookahead: &usize) -> Option { + None +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action132(__lookbehind: &usize, __lookahead: &usize) -> alloc::vec::Vec { + alloc::vec![] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action133((_, v, _): (usize, alloc::vec::Vec, usize)) -> alloc::vec::Vec { + v +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +/* keeps the String, drops the trailing Token (mirror of __action120) */ fn __action134((_, __0, _): (usize, String, usize), (_, _, _): (usize, Token, usize)) -> String { + __0 +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action135((_, __0, _): (usize, Field, usize)) -> alloc::vec::Vec { + alloc::vec![__0] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action136( + (_, v, _): (usize, alloc::vec::Vec, usize), + (_, e, _): (usize, Field, usize), +) -> alloc::vec::Vec { + { + let mut v = v; + v.push(e); + v + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action137((_, __0, _): (usize, String, usize)) -> alloc::vec::Vec { + alloc::vec![__0] +} + +#[allow( + 
clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action138( + (_, v, _): (usize, alloc::vec::Vec, usize), + (_, e, _): (usize, String, usize), +) -> alloc::vec::Vec { + { + let mut v = v; + v.push(e); + v + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action139((_, __0, _): (usize, String, usize)) -> alloc::vec::Vec { + alloc::vec![__0] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action140( + (_, v, _): (usize, alloc::vec::Vec, usize), + (_, e, _): (usize, String, usize), +) -> alloc::vec::Vec { + { + let mut v = v; + v.push(e); + v + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action141((_, __0, _): (usize, Value, usize)) -> Option { + Some(__0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action142(__lookbehind: &usize, __lookahead: &usize) -> Option { + None +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action143(__lookbehind: &usize, __lookahead: &usize) -> alloc::vec::Vec { + alloc::vec![] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action144((_, v, _): (usize, alloc::vec::Vec, usize)) -> alloc::vec::Vec { + v +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action145((_, __0, _): (usize, Value, usize), (_, _, _): (usize, Token, usize)) -> Value { + __0 +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action146((_, __0, _): (usize, OverrideOp, usize)) -> alloc::vec::Vec { + alloc::vec![__0] +} 
/* LALRPOP build output -- do not hand-edit. __action147..__action164: one-or-more list builders (singleton seed + append). __action165 onward: span-threading wrappers that rebuild a (start, value, end) triple for an inlined optional/list nonterminal and delegate to core constructor actions (__action1, __action19..__action67, __action93..__action110) defined earlier in this file. */ + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action147( + (_, v, _): (usize, alloc::vec::Vec, usize), + (_, e, _): (usize, OverrideOp, usize), +) -> alloc::vec::Vec { + { + let mut v = v; + v.push(e); + v + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action148((_, __0, _): (usize, ArcState, usize)) -> alloc::vec::Vec { + alloc::vec![__0] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action149( + (_, v, _): (usize, alloc::vec::Vec, usize), + (_, e, _): (usize, ArcState, usize), +) -> alloc::vec::Vec { + { + let mut v = v; + v.push(e); + v + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action150((_, __0, _): (usize, Transition, usize)) -> alloc::vec::Vec { + alloc::vec![__0] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action151( + (_, v, _): (usize, alloc::vec::Vec, usize), + (_, e, _): (usize, Transition, usize), +) -> alloc::vec::Vec { + { + let mut v = v; + v.push(e); + v + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action152((_, __0, _): (usize, ScheduleBlock, usize)) -> alloc::vec::Vec { + alloc::vec![__0] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action153( + (_, v, _): (usize, alloc::vec::Vec, usize), + (_, e, _): (usize, ScheduleBlock, usize), +) -> alloc::vec::Vec { + { + let mut v = v; + v.push(e); + v + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action154((_, __0, _): (usize, Field, usize)) -> Option { + Some(__0) +} + +#[allow( + 
clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action155(__lookbehind: &usize, __lookahead: &usize) -> Option { + None +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action156(__lookbehind: &usize, __lookahead: &usize) -> alloc::vec::Vec { + alloc::vec![] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action157((_, v, _): (usize, alloc::vec::Vec, usize)) -> alloc::vec::Vec { + v +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action158((_, __0, _): (usize, Field, usize), (_, _, _): (usize, Token, usize)) -> Field { + __0 +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action159((_, __0, _): (usize, Field, usize)) -> alloc::vec::Vec { + alloc::vec![__0] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action160( + (_, v, _): (usize, alloc::vec::Vec, usize), + (_, e, _): (usize, Field, usize), +) -> alloc::vec::Vec { + { + let mut v = v; + v.push(e); + v + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action161((_, __0, _): (usize, Value, usize)) -> alloc::vec::Vec { + alloc::vec![__0] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action162( + (_, v, _): (usize, alloc::vec::Vec, usize), + (_, e, _): (usize, Value, usize), +) -> alloc::vec::Vec { + { + let mut v = v; + v.push(e); + v + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action163((_, __0, _): (usize, String, usize)) -> alloc::vec::Vec { + 
alloc::vec![__0] +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action164( + (_, v, _): (usize, alloc::vec::Vec, usize), + (_, e, _): (usize, String, usize), +) -> alloc::vec::Vec { + { + let mut v = v; + v.push(e); + v + } +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +/* Template construction: __action165 has the optional Token at position 2 present (wrapped via __action116); __action166 synthesizes the None case via __action117 */ fn __action165( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, Token, usize), + __4: (usize, alloc::vec::Vec, usize), + __5: (usize, alloc::vec::Vec, usize), + __6: (usize, Token, usize), +) -> Template { + let __start0 = __2.0; + let __end0 = __2.2; + let __temp0 = __action116(__2); + let __temp0 = (__start0, __temp0, __end0); + __action21(__0, __1, __temp0, __3, __4, __5, __6) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action166( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, alloc::vec::Vec, usize), + __4: (usize, alloc::vec::Vec, usize), + __5: (usize, Token, usize), +) -> Template { + let __start0 = __1.2; + let __end0 = __2.0; + let __temp0 = __action117(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action21(__0, __1, __temp0, __2, __3, __4, __5) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action167(__0: (usize, Token, usize), __1: (usize, String, usize)) -> alloc::vec::Vec { + let __start0 = __0.0; + let __end0 = __1.2; + let __temp0 = __action120(__0, __1); + let __temp0 = (__start0, __temp0, __end0); + __action137(__temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action168( + __0: (usize, alloc::vec::Vec, usize), + __1: (usize, Token, usize), + __2: (usize, String, 
usize), +) -> alloc::vec::Vec { + let __start0 = __1.0; + let __end0 = __2.2; + let __temp0 = __action120(__1, __2); + let __temp0 = (__start0, __temp0, __end0); + __action138(__0, __temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action169(__0: (usize, Token, usize), __1: (usize, String, usize)) -> Vec { + let __start0 = __1.2; + let __end0 = __1.2; + let __temp0 = __action118(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action20(__0, __1, __temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action170( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, alloc::vec::Vec, usize), +) -> Vec { + let __start0 = __2.0; + let __end0 = __2.2; + let __temp0 = __action119(__2); + let __temp0 = (__start0, __temp0, __end0); + __action20(__0, __1, __temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action171(__0: (usize, Token, usize), __1: (usize, String, usize)) -> Option { + let __start0 = __0.0; + let __end0 = __1.2; + let __temp0 = __action99(__0, __1); + let __temp0 = (__start0, __temp0, __end0); + __action97(__temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +/* Participant: __action172 with, __action173 without, the optional "Token Ident" clause */ fn __action172( + __0: (usize, Vec, usize), + __1: (usize, Token, usize), + __2: (usize, String, usize), + __3: (usize, Option>, usize), + __4: (usize, Option>, usize), +) -> Participant { + let __start0 = __1.0; + let __end0 = __2.2; + let __temp0 = __action171(__1, __2); + let __temp0 = (__start0, __temp0, __end0); + __action63(__0, __temp0, __3, __4) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action173( + __0: (usize, Vec, usize), + __1: (usize, Option>, usize), + __2: 
(usize, Option>, usize), +) -> Participant { + let __start0 = __0.2; + let __end0 = __1.0; + let __temp0 = __action98(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action63(__0, __temp0, __1, __2) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action174(__0: (usize, Field, usize), __1: (usize, Token, usize)) -> alloc::vec::Vec { + let __start0 = __0.0; + let __end0 = __1.2; + let __temp0 = __action158(__0, __1); + let __temp0 = (__start0, __temp0, __end0); + __action159(__temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action175( + __0: (usize, alloc::vec::Vec, usize), + __1: (usize, Field, usize), + __2: (usize, Token, usize), +) -> alloc::vec::Vec { + let __start0 = __1.0; + let __end0 = __2.2; + let __temp0 = __action158(__1, __2); + let __temp0 = (__start0, __temp0, __end0); + __action160(__0, __temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action176(__0: (usize, Option, usize)) -> Vec { + let __start0 = __0.0; + let __end0 = __0.0; + let __temp0 = __action156(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action102(__temp0, __0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action177( + __0: (usize, alloc::vec::Vec, usize), + __1: (usize, Option, usize), +) -> Vec { + let __start0 = __0.0; + let __end0 = __0.2; + let __temp0 = __action157(__0); + let __temp0 = (__start0, __temp0, __end0); + __action102(__temp0, __1) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action178(__0: (usize, String, usize), __1: (usize, Token, usize)) -> alloc::vec::Vec { + let __start0 = __0.0; + let __end0 = __1.2; + let __temp0 = __action134(__0, 
__1); + let __temp0 = (__start0, __temp0, __end0); + __action163(__temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action179( + __0: (usize, alloc::vec::Vec, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), +) -> alloc::vec::Vec { + let __start0 = __1.0; + let __end0 = __2.2; + let __temp0 = __action134(__1, __2); + let __temp0 = (__start0, __temp0, __end0); + __action164(__0, __temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action180(__0: (usize, Option, usize)) -> Vec { + let __start0 = __0.0; + let __end0 = __0.0; + let __temp0 = __action132(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action125(__temp0, __0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action181( + __0: (usize, alloc::vec::Vec, usize), + __1: (usize, Option, usize), +) -> Vec { + let __start0 = __0.0; + let __end0 = __0.2; + let __temp0 = __action133(__0); + let __temp0 = (__start0, __temp0, __end0); + __action125(__temp0, __1) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action182(__0: (usize, Value, usize), __1: (usize, Token, usize)) -> alloc::vec::Vec { + let __start0 = __0.0; + let __end0 = __1.2; + let __temp0 = __action145(__0, __1); + let __temp0 = (__start0, __temp0, __end0); + __action161(__temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action183( + __0: (usize, alloc::vec::Vec, usize), + __1: (usize, Value, usize), + __2: (usize, Token, usize), +) -> alloc::vec::Vec { + let __start0 = __1.0; + let __end0 = __2.2; + let __temp0 = __action145(__1, __2); + let __temp0 = (__start0, __temp0, __end0); + __action162(__0, __temp0) +} + +#[allow( + 
clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action184(__0: (usize, Option, usize)) -> Vec { + let __start0 = __0.0; + let __end0 = __0.0; + let __temp0 = __action143(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action113(__temp0, __0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action185( + __0: (usize, alloc::vec::Vec, usize), + __1: (usize, Option, usize), +) -> Vec { + let __start0 = __0.0; + let __end0 = __0.2; + let __temp0 = __action144(__0); + let __temp0 = (__start0, __temp0, __end0); + __action113(__temp0, __1) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action186( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, Token, usize), +) -> LifeArc { + let __start0 = __2.2; + let __end0 = __3.0; + let __temp0 = __action109(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action46(__0, __1, __2, __temp0, __3) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action187( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, alloc::vec::Vec, usize), + __4: (usize, Token, usize), +) -> LifeArc { + let __start0 = __3.0; + let __end0 = __3.2; + let __temp0 = __action110(__3); + let __temp0 = (__start0, __temp0, __end0); + __action46(__0, __1, __2, __temp0, __4) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +/* File root: __action188 empty declaration list, __action189 non-empty */ fn __action188(__lookbehind: &usize, __lookahead: &usize) -> File { + let __start0 = *__lookbehind; + let __end0 = *__lookahead; + let __temp0 = __action126(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action1(__temp0) +} + +#[allow( 
+ clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action189(__0: (usize, alloc::vec::Vec, usize)) -> File { + let __start0 = __0.0; + let __end0 = __0.2; + let __temp0 = __action127(__0); + let __temp0 = (__start0, __temp0, __end0); + __action1(__temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action190( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Option>, usize), + __3: (usize, Token, usize), + __4: (usize, Token, usize), +) -> Character { + let __start0 = __3.2; + let __end0 = __4.0; + let __temp0 = __action121(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action19(__0, __1, __2, __3, __temp0, __4) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action191( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Option>, usize), + __3: (usize, Token, usize), + __4: (usize, alloc::vec::Vec, usize), + __5: (usize, Token, usize), +) -> Character { + let __start0 = __4.0; + let __end0 = __4.2; + let __temp0 = __action122(__4); + let __temp0 = (__start0, __temp0, __end0); + __action19(__0, __1, __2, __3, __temp0, __5) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action192( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, Token, usize), +) -> Institution { + let __start0 = __2.2; + let __end0 = __3.0; + let __temp0 = __action121(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action61(__0, __1, __2, __temp0, __3) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action193( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, 
usize), + __3: (usize, alloc::vec::Vec, usize), + __4: (usize, Token, usize), +) -> Institution { + let __start0 = __3.0; + let __end0 = __3.2; + let __temp0 = __action122(__3); + let __temp0 = (__start0, __temp0, __end0); + __action61(__0, __1, __2, __temp0, __4) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action194( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, Token, usize), +) -> Location { + let __start0 = __2.2; + let __end0 = __3.0; + let __temp0 = __action121(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action66(__0, __1, __2, __temp0, __3) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action195( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, alloc::vec::Vec, usize), + __4: (usize, Token, usize), +) -> Location { + let __start0 = __3.0; + let __end0 = __3.2; + let __temp0 = __action122(__3); + let __temp0 = (__start0, __temp0, __end0); + __action66(__0, __1, __2, __temp0, __4) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action196( + __0: (usize, Token, usize), + __1: (usize, Token, usize), + __2: (usize, Token, usize), +) -> Vec { + let __start0 = __1.2; + let __end0 = __2.0; + let __temp0 = __action121(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action65(__0, __1, __temp0, __2) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action197( + __0: (usize, Token, usize), + __1: (usize, Token, usize), + __2: (usize, alloc::vec::Vec, usize), + __3: (usize, Token, usize), +) -> Vec { + let __start0 = __2.0; + let __end0 = __2.2; + let __temp0 = __action122(__2); + let __temp0 = (__start0, 
__temp0, __end0); + __action65(__0, __1, __temp0, __3) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action198( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, alloc::vec::Vec, usize), + __4: (usize, Token, usize), +) -> Relationship { + let __start0 = __3.2; + let __end0 = __4.0; + let __temp0 = __action121(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action62(__0, __1, __2, __3, __temp0, __4) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action199( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, alloc::vec::Vec, usize), + __4: (usize, alloc::vec::Vec, usize), + __5: (usize, Token, usize), +) -> Relationship { + let __start0 = __4.0; + let __end0 = __4.2; + let __temp0 = __action122(__4); + let __temp0 = (__start0, __temp0, __end0); + __action62(__0, __1, __2, __3, __temp0, __5) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action200( + __0: (usize, Token, usize), + __1: (usize, Token, usize), + __2: (usize, Token, usize), +) -> Vec { + let __start0 = __1.2; + let __end0 = __2.0; + let __temp0 = __action121(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action64(__0, __1, __temp0, __2) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action201( + __0: (usize, Token, usize), + __1: (usize, Token, usize), + __2: (usize, alloc::vec::Vec, usize), + __3: (usize, Token, usize), +) -> Vec { + let __start0 = __2.0; + let __end0 = __2.2; + let __temp0 = __action122(__2); + let __temp0 = (__start0, __temp0, __end0); + __action64(__0, __1, __temp0, __3) +} + +#[allow( + clippy::too_many_arguments, + 
clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action202( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, Token, usize), +) -> Species { + let __start0 = __2.2; + let __end0 = __3.0; + let __temp0 = __action121(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action67(__0, __1, __2, __temp0, __3) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action203( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, alloc::vec::Vec, usize), + __4: (usize, Token, usize), +) -> Species { + let __start0 = __3.0; + let __end0 = __3.2; + let __temp0 = __action122(__3); + let __temp0 = (__start0, __temp0, __end0); + __action67(__0, __1, __2, __temp0, __4) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action204( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, Token, usize), + __4: (usize, alloc::vec::Vec, usize), + __5: (usize, Token, usize), +) -> Template { + let __start0 = __4.2; + let __end0 = __5.0; + let __temp0 = __action121(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action165(__0, __1, __2, __3, __4, __temp0, __5) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action205( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, Token, usize), + __4: (usize, alloc::vec::Vec, usize), + __5: (usize, alloc::vec::Vec, usize), + __6: (usize, Token, usize), +) -> Template { + let __start0 = __5.0; + let __end0 = __5.2; + let __temp0 = __action122(__5); + let __temp0 = (__start0, __temp0, __end0); + __action165(__0, __1, __2, __3, __4, __temp0, __6) +} + +#[allow( + 
clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action206( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, alloc::vec::Vec, usize), + __4: (usize, Token, usize), +) -> Template { + let __start0 = __3.2; + let __end0 = __4.0; + let __temp0 = __action121(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action166(__0, __1, __2, __3, __temp0, __4) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action207( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, alloc::vec::Vec, usize), + __4: (usize, alloc::vec::Vec, usize), + __5: (usize, Token, usize), +) -> Template { + let __start0 = __4.0; + let __end0 = __4.2; + let __temp0 = __action122(__4); + let __temp0 = (__start0, __temp0, __end0); + __action166(__0, __1, __2, __3, __temp0, __5) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action208(__0: (usize, Token, usize), __1: (usize, Token, usize)) -> Value { + let __start0 = __0.2; + let __end0 = __1.0; + let __temp0 = __action121(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action35(__0, __temp0, __1) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action209( + __0: (usize, Token, usize), + __1: (usize, alloc::vec::Vec, usize), + __2: (usize, Token, usize), +) -> Value { + let __start0 = __1.0; + let __end0 = __1.2; + let __temp0 = __action122(__1); + let __temp0 = (__start0, __temp0, __end0); + __action35(__0, __temp0, __2) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action210(__0: (usize, Field, usize)) -> Vec { + let __start0 = __0.0; + let __end0 = 
__0.2; + let __temp0 = __action154(__0); + let __temp0 = (__start0, __temp0, __end0); + __action176(__temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action211(__lookbehind: &usize, __lookahead: &usize) -> Vec { + let __start0 = *__lookbehind; + let __end0 = *__lookahead; + let __temp0 = __action155(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action176(__temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action212( + __0: (usize, alloc::vec::Vec, usize), + __1: (usize, Field, usize), +) -> Vec { + let __start0 = __1.0; + let __end0 = __1.2; + let __temp0 = __action154(__1); + let __temp0 = (__start0, __temp0, __end0); + __action177(__0, __temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action213(__0: (usize, alloc::vec::Vec, usize)) -> Vec { + let __start0 = __0.2; + let __end0 = __0.2; + let __temp0 = __action155(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action177(__0, __temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action214(__0: (usize, String, usize)) -> Vec { + let __start0 = __0.0; + let __end0 = __0.2; + let __temp0 = __action130(__0); + let __temp0 = (__start0, __temp0, __end0); + __action180(__temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action215(__lookbehind: &usize, __lookahead: &usize) -> Vec { + let __start0 = *__lookbehind; + let __end0 = *__lookahead; + let __temp0 = __action131(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action180(__temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn 
__action216( + __0: (usize, alloc::vec::Vec, usize), + __1: (usize, String, usize), +) -> Vec { + let __start0 = __1.0; + let __end0 = __1.2; + let __temp0 = __action130(__1); + let __temp0 = (__start0, __temp0, __end0); + __action181(__0, __temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action217(__0: (usize, alloc::vec::Vec, usize)) -> Vec { + let __start0 = __0.2; + let __end0 = __0.2; + let __temp0 = __action131(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action181(__0, __temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action218( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, Token, usize), + __4: (usize, Token, usize), +) -> Template { + let __start0 = __3.2; + let __end0 = __4.0; + let __temp0 = __action114(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action204(__0, __1, __2, __3, __temp0, __4) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action219( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, Token, usize), + __4: (usize, alloc::vec::Vec, usize), + __5: (usize, Token, usize), +) -> Template { + let __start0 = __4.0; + let __end0 = __4.2; + let __temp0 = __action115(__4); + let __temp0 = (__start0, __temp0, __end0); + __action204(__0, __1, __2, __3, __temp0, __5) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action220( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, Token, usize), + __4: (usize, alloc::vec::Vec, usize), + __5: (usize, Token, usize), +) -> Template { + let __start0 = __3.2; + let __end0 = __4.0; + let 
__temp0 = __action114(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action205(__0, __1, __2, __3, __temp0, __4, __5) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action221( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, Token, usize), + __4: (usize, alloc::vec::Vec, usize), + __5: (usize, alloc::vec::Vec, usize), + __6: (usize, Token, usize), +) -> Template { + let __start0 = __4.0; + let __end0 = __4.2; + let __temp0 = __action115(__4); + let __temp0 = (__start0, __temp0, __end0); + __action205(__0, __1, __2, __3, __temp0, __5, __6) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action222( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, Token, usize), +) -> Template { + let __start0 = __2.2; + let __end0 = __3.0; + let __temp0 = __action114(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action206(__0, __1, __2, __temp0, __3) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action223( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, alloc::vec::Vec, usize), + __4: (usize, Token, usize), +) -> Template { + let __start0 = __3.0; + let __end0 = __3.2; + let __temp0 = __action115(__3); + let __temp0 = (__start0, __temp0, __end0); + __action206(__0, __1, __2, __temp0, __4) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action224( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, alloc::vec::Vec, usize), + __4: (usize, Token, usize), +) -> Template { + let __start0 = __2.2; + let __end0 = __3.0; + let 
__temp0 = __action114(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action207(__0, __1, __2, __temp0, __3, __4) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action225( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, alloc::vec::Vec, usize), + __4: (usize, alloc::vec::Vec, usize), + __5: (usize, Token, usize), +) -> Template { + let __start0 = __3.0; + let __end0 = __3.2; + let __temp0 = __action115(__3); + let __temp0 = (__start0, __temp0, __end0); + __action207(__0, __1, __2, __temp0, __4, __5) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action226( + __0: (usize, Vec, usize), + __1: (usize, Token, usize), + __2: (usize, String, usize), + __3: (usize, Option>, usize), + __4: (usize, Vec, usize), +) -> Participant { + let __start0 = __4.0; + let __end0 = __4.2; + let __temp0 = __action93(__4); + let __temp0 = (__start0, __temp0, __end0); + __action172(__0, __1, __2, __3, __temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action227( + __0: (usize, Vec, usize), + __1: (usize, Token, usize), + __2: (usize, String, usize), + __3: (usize, Option>, usize), +) -> Participant { + let __start0 = __3.2; + let __end0 = __3.2; + let __temp0 = __action94(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action172(__0, __1, __2, __3, __temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action228( + __0: (usize, Vec, usize), + __1: (usize, Option>, usize), + __2: (usize, Vec, usize), +) -> Participant { + let __start0 = __2.0; + let __end0 = __2.2; + let __temp0 = __action93(__2); + let __temp0 = (__start0, __temp0, __end0); + __action173(__0, __1, __temp0) +} + 
+#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action229( + __0: (usize, Vec, usize), + __1: (usize, Option>, usize), +) -> Participant { + let __start0 = __1.2; + let __end0 = __1.2; + let __temp0 = __action94(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action173(__0, __1, __temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action230( + __0: (usize, Token, usize), + __1: (usize, Vec, usize), + __2: (usize, Token, usize), + __3: (usize, Token, usize), +) -> Override { + let __start0 = __2.2; + let __end0 = __3.0; + let __temp0 = __action111(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action42(__0, __1, __2, __temp0, __3) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action231( + __0: (usize, Token, usize), + __1: (usize, Vec, usize), + __2: (usize, Token, usize), + __3: (usize, alloc::vec::Vec, usize), + __4: (usize, Token, usize), +) -> Override { + let __start0 = __3.0; + let __end0 = __3.2; + let __temp0 = __action112(__3); + let __temp0 = (__start0, __temp0, __end0); + __action42(__0, __1, __2, __temp0, __4) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action232( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, Token, usize), +) -> Schedule { + let __start0 = __2.2; + let __end0 = __3.0; + let __temp0 = __action105(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action49(__0, __1, __2, __temp0, __3) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action233( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: 
(usize, alloc::vec::Vec, usize), + __4: (usize, Token, usize), +) -> Schedule { + let __start0 = __3.0; + let __end0 = __3.2; + let __temp0 = __action106(__3); + let __temp0 = (__start0, __temp0, __end0); + __action49(__0, __1, __2, __temp0, __4) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action234( + __0: (usize, Vec, usize), + __1: (usize, Token, usize), + __2: (usize, String, usize), + __3: (usize, Vec, usize), + __4: (usize, Vec, usize), +) -> Participant { + let __start0 = __3.0; + let __end0 = __3.2; + let __temp0 = __action95(__3); + let __temp0 = (__start0, __temp0, __end0); + __action226(__0, __1, __2, __temp0, __4) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action235( + __0: (usize, Vec, usize), + __1: (usize, Token, usize), + __2: (usize, String, usize), + __3: (usize, Vec, usize), +) -> Participant { + let __start0 = __2.2; + let __end0 = __3.0; + let __temp0 = __action96(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action226(__0, __1, __2, __temp0, __3) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action236( + __0: (usize, Vec, usize), + __1: (usize, Token, usize), + __2: (usize, String, usize), + __3: (usize, Vec, usize), +) -> Participant { + let __start0 = __3.0; + let __end0 = __3.2; + let __temp0 = __action95(__3); + let __temp0 = (__start0, __temp0, __end0); + __action227(__0, __1, __2, __temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action237( + __0: (usize, Vec, usize), + __1: (usize, Token, usize), + __2: (usize, String, usize), +) -> Participant { + let __start0 = __2.2; + let __end0 = __2.2; + let __temp0 = __action96(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action227(__0, 
__1, __2, __temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action238( + __0: (usize, Vec, usize), + __1: (usize, Vec, usize), + __2: (usize, Vec, usize), +) -> Participant { + let __start0 = __1.0; + let __end0 = __1.2; + let __temp0 = __action95(__1); + let __temp0 = (__start0, __temp0, __end0); + __action228(__0, __temp0, __2) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action239(__0: (usize, Vec, usize), __1: (usize, Vec, usize)) -> Participant { + let __start0 = __0.2; + let __end0 = __1.0; + let __temp0 = __action96(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action228(__0, __temp0, __1) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action240(__0: (usize, Vec, usize), __1: (usize, Vec, usize)) -> Participant { + let __start0 = __1.0; + let __end0 = __1.2; + let __temp0 = __action95(__1); + let __temp0 = (__start0, __temp0, __end0); + __action229(__0, __temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action241(__0: (usize, Vec, usize)) -> Participant { + let __start0 = __0.2; + let __end0 = __0.2; + let __temp0 = __action96(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action229(__0, __temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action242( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Vec, usize), + __3: (usize, Token, usize), + __4: (usize, Token, usize), +) -> Character { + let __start0 = __2.0; + let __end0 = __2.2; + let __temp0 = __action123(__2); + let __temp0 = (__start0, __temp0, __end0); + __action190(__0, __1, __temp0, __3, __4) +} + +#[allow( + clippy::too_many_arguments, 
+ clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action243( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, Token, usize), +) -> Character { + let __start0 = __1.2; + let __end0 = __2.0; + let __temp0 = __action124(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action190(__0, __1, __temp0, __2, __3) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action244( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Vec, usize), + __3: (usize, Token, usize), + __4: (usize, alloc::vec::Vec, usize), + __5: (usize, Token, usize), +) -> Character { + let __start0 = __2.0; + let __end0 = __2.2; + let __temp0 = __action123(__2); + let __temp0 = (__start0, __temp0, __end0); + __action191(__0, __1, __temp0, __3, __4, __5) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action245( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, alloc::vec::Vec, usize), + __4: (usize, Token, usize), +) -> Character { + let __start0 = __1.2; + let __end0 = __2.0; + let __temp0 = __action124(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action191(__0, __1, __temp0, __2, __3, __4) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action246( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, Token, usize), +) -> ArcState { + let __start0 = __2.2; + let __end0 = __3.0; + let __temp0 = __action107(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action47(__0, __1, __2, __temp0, __3) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] 
+fn __action247( + __0: (usize, Token, usize), + __1: (usize, String, usize), + __2: (usize, Token, usize), + __3: (usize, alloc::vec::Vec, usize), + __4: (usize, Token, usize), +) -> ArcState { + let __start0 = __3.0; + let __end0 = __3.2; + let __temp0 = __action108(__3); + let __temp0 = (__start0, __temp0, __end0); + __action47(__0, __1, __2, __temp0, __4) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action248(__0: (usize, Value, usize)) -> Vec { + let __start0 = __0.0; + let __end0 = __0.2; + let __temp0 = __action141(__0); + let __temp0 = (__start0, __temp0, __end0); + __action184(__temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action249(__lookbehind: &usize, __lookahead: &usize) -> Vec { + let __start0 = *__lookbehind; + let __end0 = *__lookahead; + let __temp0 = __action142(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action184(__temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action250( + __0: (usize, alloc::vec::Vec, usize), + __1: (usize, Value, usize), +) -> Vec { + let __start0 = __1.0; + let __end0 = __1.2; + let __temp0 = __action141(__1); + let __temp0 = (__start0, __temp0, __end0); + __action185(__0, __temp0) +} + +#[allow( + clippy::too_many_arguments, + clippy::needless_lifetimes, + clippy::just_underscores_and_digits +)] +fn __action251(__0: (usize, alloc::vec::Vec, usize)) -> Vec { + let __start0 = __0.2; + let __end0 = __0.2; + let __temp0 = __action142(&__start0, &__end0); + let __temp0 = (__start0, __temp0, __end0); + __action185(__0, __temp0) +} +#[allow(clippy::type_complexity, dead_code)] + +pub trait __ToTriple { + fn to_triple( + value: Self, + ) -> Result< + (usize, Token, usize), + __lalrpop_util::ParseError, + >; +} + +impl __ToTriple for (usize, Token, usize) { + fn 
to_triple( + value: Self, + ) -> Result< + (usize, Token, usize), + __lalrpop_util::ParseError, + > { + Ok(value) + } +} +impl __ToTriple for Result<(usize, Token, usize), crate::syntax::ParseError> { + fn to_triple( + value: Self, + ) -> Result< + (usize, Token, usize), + __lalrpop_util::ParseError, + > { + match value { + | Ok(v) => Ok(v), + | Err(error) => Err(__lalrpop_util::ParseError::User { error }), + } + } +} diff --git a/src/syntax/prop_tests.rs b/src/syntax/prop_tests.rs new file mode 100644 index 0000000..58bf3a9 --- /dev/null +++ b/src/syntax/prop_tests.rs @@ -0,0 +1,1441 @@ +use proptest::{ + prelude::*, + strategy::BoxedStrategy, +}; + +use crate::syntax::{ + lexer::{ + Lexer, + Token, + }, + FileParser, +}; + +// ===== Generators for valid syntax elements ===== + +fn valid_ident() -> impl Strategy { + "[a-zA-Z_][a-zA-Z0-9_]{0,20}".prop_filter("not a keyword", |s| { + !matches!( + s.as_str(), + "use" | + "character" | + "template" | + "life_arc" | + "schedule" | + "behavior" | + "institution" | + "relationship" | + "location" | + "species" | + "enum" | + "state" | + "on" | + "as" | + "self" | + "other" | + "remove" | + "append" | + "forall" | + "exists" | + "in" | + "where" | + "and" | + "or" | + "not" | + "is" | + "true" | + "false" + ) + }) +} + +fn valid_string() -> impl Strategy { + // Strings without quotes or backslashes for simplicity + "[a-zA-Z0-9 ,.!?-]{0,50}" +} + +fn valid_int() -> impl Strategy { + -1000i64..1000i64 +} + +fn valid_float() -> impl Strategy { + (-1000.0..1000.0).prop_filter("finite", |f: &f64| f.is_finite()) +} + +fn valid_time() -> impl Strategy { + (0u8..24, 0u8..60, 0u8..60) +} + +fn valid_duration() -> impl Strategy { + (0u32..24, 0u32..60, 0u32..60) +} + +// ===== Lexer property tests ===== + +proptest! 
{ + #[test] + fn test_lexer_doesnt_panic(s in "\\PC{0,100}") { + // Any string should not panic the lexer + let lexer = Lexer::new(&s); + let _tokens: Vec<_> = lexer.collect(); + } + + #[test] + fn test_valid_ident_tokenizes(name in valid_ident()) { + let lexer = Lexer::new(&name); + let tokens: Vec = lexer.map(|(_, tok, _)| tok).collect(); + assert_eq!(tokens.len(), 1); + match &tokens[0] { + Token::Ident(s) => assert_eq!(s, &name), + _ => panic!("Expected Ident token, got {:?}", tokens[0]), + } + } + + #[test] + fn test_valid_int_tokenizes(n in valid_int()) { + let input = n.to_string(); + let lexer = Lexer::new(&input); + let tokens: Vec = lexer.map(|(_, tok, _)| tok).collect(); + assert_eq!(tokens.len(), 1); + match tokens[0] { + Token::IntLit(val) => assert_eq!(val, n), + _ => panic!("Expected IntLit token"), + } + } + + #[test] + fn test_valid_float_tokenizes(n in valid_float()) { + let input = format!("{:.2}", n); + let lexer = Lexer::new(&input); + let tokens: Vec = lexer.map(|(_, tok, _)| tok).collect(); + assert_eq!(tokens.len(), 1); + match tokens[0] { + Token::FloatLit(_) => {}, + _ => panic!("Expected FloatLit token"), + } + } + + #[test] + fn test_valid_string_tokenizes(s in valid_string()) { + let input = format!("\"{}\"", s); + let lexer = Lexer::new(&input); + let tokens: Vec = lexer.map(|(_, tok, _)| tok).collect(); + assert_eq!(tokens.len(), 1); + match &tokens[0] { + Token::StringLit(val) => assert_eq!(val, &s), + _ => panic!("Expected StringLit token"), + } + } + + #[test] + fn test_time_literal_tokenizes(time in valid_time()) { + let (h, m, s) = time; + let input = format!("{:02}:{:02}:{:02}", h, m, s); + let lexer = Lexer::new(&input); + let tokens: Vec = lexer.map(|(_, tok, _)| tok).collect(); + assert_eq!(tokens.len(), 1); + match &tokens[0] { + Token::TimeLit(_) => {}, + _ => panic!("Expected TimeLit token"), + } + } + + #[test] + fn test_duration_literal_tokenizes(dur in valid_duration()) { + let (h, m, s) = dur; + let input = if h > 0 && 
m > 0 && s > 0 { + format!("{}h{}m{}s", h, m, s) + } else if h > 0 && m > 0 { + format!("{}h{}m", h, m) + } else if h > 0 { + format!("{}h", h) + } else if m > 0 { + format!("{}m", m) + } else { + format!("{}s", s) + }; + + let lexer = Lexer::new(&input); + let tokens: Vec = lexer.map(|(_, tok, _)| tok).collect(); + + if !input.is_empty() && input != "0h" && input != "0m" && input != "0s" { + assert!(!tokens.is_empty(), "Duration '{}' should tokenize", input); + } + } + + #[test] + fn test_keywords_are_distinct_from_idents( + keyword in prop::sample::select(vec![ + "character", "template", "enum", "use", "self", "other", + "and", "or", "not", "is", "true", "false" + ]) + ) { + let lexer = Lexer::new(keyword); + let tokens: Vec = lexer.map(|(_, tok, _)| tok).collect(); + assert_eq!(tokens.len(), 1); + // Should be a keyword token, not Ident + if let Token::Ident(_) = &tokens[0] { panic!("'{}' should be a keyword, not an Ident", keyword) } + } + + #[test] + fn test_whitespace_separates_tokens( + name1 in valid_ident(), + name2 in valid_ident(), + ws in "[ \t\n]{1,5}" + ) { + let input = format!("{}{}{}", name1, ws, name2); + let lexer = Lexer::new(&input); + let tokens: Vec = lexer.map(|(_, tok, _)| tok).collect(); + assert_eq!(tokens.len(), 2); + match (&tokens[0], &tokens[1]) { + (Token::Ident(s1), Token::Ident(s2)) => { + assert_eq!(s1, &name1); + assert_eq!(s2, &name2); + } + _ => panic!("Expected two Ident tokens"), + } + } +} + +// ===== Parser property tests ===== + +fn valid_field() -> impl Strategy { + (valid_ident(), valid_int().prop_map(|n| n.to_string())) +} + +fn valid_character() -> impl Strategy { + (valid_ident(), prop::collection::vec(valid_field(), 0..5)).prop_map(|(name, fields)| { + let fields_str = fields + .iter() + .map(|(k, v)| format!(" {}: {}", k, v)) + .collect::>() + .join("\n"); + format!("character {} {{\n{}\n}}", name, fields_str) + }) +} + +fn valid_template() -> impl Strategy { + ( + valid_ident(), + prop::collection::vec( + 
(valid_ident(), valid_int(), valid_int()).prop_map(|(name, lo, hi)| { + let (min, max) = if lo < hi { (lo, hi) } else { (hi, lo) }; + (name, format!("{}..{}", min, max)) + }), + 0..5, + ), + ) + .prop_map(|(name, fields)| { + let fields_str = fields + .iter() + .map(|(k, v)| format!(" {}: {}", k, v)) + .collect::>() + .join("\n"); + format!("template {} {{\n{}\n}}", name, fields_str) + }) +} + +fn valid_enum() -> impl Strategy { + (valid_ident(), prop::collection::vec(valid_ident(), 1..10)) + .prop_map(|(name, variants)| format!("enum {} {{ {} }}", name, variants.join(", "))) +} + +fn valid_schedule() -> impl Strategy { + (valid_ident(), prop::collection::vec(valid_time(), 1..5)).prop_map(|(name, times)| { + let blocks = times + .windows(2) + .map(|w| { + let (h1, m1, s1) = w[0]; + let (h2, m2, _) = w[1]; + format!( + " {:02}:{:02}:{:02} -> {:02}:{:02}:00: activity", + h1, m1, s1, h2, m2 + ) + }) + .collect::>() + .join("\n"); + format!("schedule {} {{\n{}\n}}", name, blocks) + }) +} + +fn valid_location() -> impl Strategy { + (valid_ident(), prop::collection::vec(valid_field(), 0..5)).prop_map(|(name, fields)| { + let fields_str = fields + .iter() + .map(|(k, v)| format!(" {}: {}", k, v)) + .collect::>() + .join("\n"); + format!("location {} {{\n{}\n}}", name, fields_str) + }) +} + +fn valid_species() -> impl Strategy { + (valid_ident(), prop::collection::vec(valid_field(), 0..5)).prop_map(|(name, fields)| { + let fields_str = fields + .iter() + .map(|(k, v)| format!(" {}: {}", k, v)) + .collect::>() + .join("\n"); + format!("species {} {{\n{}\n}}", name, fields_str) + }) +} + +fn valid_institution() -> impl Strategy { + (valid_ident(), prop::collection::vec(valid_field(), 0..5)).prop_map(|(name, fields)| { + let fields_str = fields + .iter() + .map(|(k, v)| format!(" {}: {}", k, v)) + .collect::>() + .join("\n"); + format!("institution {} {{\n{}\n}}", name, fields_str) + }) +} + +fn valid_relationship() -> impl Strategy { + ( + valid_ident(), + valid_ident(), + 
valid_ident(), + prop::collection::vec(valid_field(), 0..3), + ) + .prop_map(|(name, person1, person2, fields)| { + let fields_str = fields + .iter() + .map(|(k, v)| format!(" {}: {}", k, v)) + .collect::>() + .join("\n"); + format!( + "relationship {} {{\n {}\n {}\n{}\n}}", + name, person1, person2, fields_str + ) + }) +} + +proptest! { + #[test] + fn test_parser_doesnt_panic(input in "\\PC{0,200}") { + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let _ = parser.parse(lexer); + // Should not panic + } + + #[test] + fn test_valid_character_parses(input in valid_character()) { + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse valid character: {}\nError: {:?}", input, result.err()); + + if let Ok(file) = result { + assert_eq!(file.declarations.len(), 1); + match &file.declarations[0] { + crate::syntax::ast::Declaration::Character(_) => {}, + _ => panic!("Expected Character declaration"), + } + } + } + + #[test] + fn test_valid_template_parses(input in valid_template()) { + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse valid template: {}\nError: {:?}", input, result.err()); + + if let Ok(file) = result { + assert_eq!(file.declarations.len(), 1); + match &file.declarations[0] { + crate::syntax::ast::Declaration::Template(_) => {}, + _ => panic!("Expected Template declaration"), + } + } + } + + #[test] + fn test_valid_enum_parses(input in valid_enum()) { + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse valid enum: {}\nError: {:?}", input, result.err()); + + if let Ok(file) = result { + assert_eq!(file.declarations.len(), 1); + match &file.declarations[0] { + crate::syntax::ast::Declaration::Enum(_) => {}, + _ => panic!("Expected Enum declaration"), + } + } + 
} + + #[test] + fn test_valid_schedule_parses(input in valid_schedule()) { + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse valid schedule: {}\nError: {:?}", input, result.err()); + + if let Ok(file) = result { + assert_eq!(file.declarations.len(), 1); + match &file.declarations[0] { + crate::syntax::ast::Declaration::Schedule(_) => {}, + _ => panic!("Expected Schedule declaration"), + } + } + } + + #[test] + fn test_valid_location_parses(input in valid_location()) { + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse valid location: {}\nError: {:?}", input, result.err()); + + if let Ok(file) = result { + assert_eq!(file.declarations.len(), 1); + match &file.declarations[0] { + crate::syntax::ast::Declaration::Location(_) => {}, + _ => panic!("Expected Location declaration"), + } + } + } + + #[test] + fn test_valid_species_parses(input in valid_species()) { + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse valid species: {}\nError: {:?}", input, result.err()); + + if let Ok(file) = result { + assert_eq!(file.declarations.len(), 1); + match &file.declarations[0] { + crate::syntax::ast::Declaration::Species(_) => {}, + _ => panic!("Expected Species declaration"), + } + } + } + + #[test] + fn test_valid_institution_parses(input in valid_institution()) { + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse valid institution: {}\nError: {:?}", input, result.err()); + + if let Ok(file) = result { + assert_eq!(file.declarations.len(), 1); + match &file.declarations[0] { + crate::syntax::ast::Declaration::Institution(_) => {}, + _ => panic!("Expected Institution declaration"), + } + } + } + + 
#[test] + fn test_valid_relationship_parses(input in valid_relationship()) { + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse valid relationship: {}\nError: {:?}", input, result.err()); + + if let Ok(file) = result { + assert_eq!(file.declarations.len(), 1); + match &file.declarations[0] { + crate::syntax::ast::Declaration::Relationship(_) => {}, + _ => panic!("Expected Relationship declaration"), + } + } + } + + #[test] + fn test_multiple_declarations_parse( + chars in prop::collection::vec(valid_character(), 0..3), + templates in prop::collection::vec(valid_template(), 0..3), + enums in prop::collection::vec(valid_enum(), 0..3), + ) { + let mut all = chars; + all.extend(templates); + all.extend(enums); + let input = all.join("\n\n"); + + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + + if !all.is_empty() { + assert!(result.is_ok(), "Failed to parse multiple declarations:\n{}\nError: {:?}", input, result.err()); + if let Ok(file) = result { + assert_eq!(file.declarations.len(), all.len()); + } + } + } +} + +// ===== Life Arc generators ===== + +fn valid_comparison_expr() -> impl Strategy { + ( + valid_ident(), + prop::sample::select(vec![">", ">=", "<", "<="]), + prop_oneof![ + valid_int().prop_map(|n| n.to_string()), + valid_float().prop_map(|f| format!("{:.2}", f)), + ], + ) + .prop_map(|(ident, op, val)| format!("{} {} {}", ident, op, val)) +} + +fn valid_equality_expr() -> impl Strategy { + ( + valid_ident(), + prop_oneof![ + valid_int().prop_map(|n| n.to_string()), + valid_float().prop_map(|f| format!("{:.2}", f)), + valid_string().prop_map(|s| format!("\"{}\"", s)), + Just("true".to_string()), + Just("false".to_string()), + ], + ) + .prop_map(|(ident, val)| format!("{} is {}", ident, val)) +} + +fn valid_logical_and_expr() -> impl Strategy { + (valid_comparison_expr(), valid_comparison_expr()) + 
.prop_map(|(left, right)| format!("{} and {}", left, right)) +} + +fn valid_logical_or_expr() -> impl Strategy { + (valid_ident(), valid_ident()).prop_map(|(left, right)| format!("{} or {}", left, right)) +} + +fn valid_logical_not_expr() -> impl Strategy { + valid_ident().prop_map(|ident| format!("not {}", ident)) +} + +fn valid_field_access_expr() -> impl Strategy { + (prop::sample::select(vec!["self", "other"]), valid_ident()) + .prop_map(|(base, field)| format!("{}.{}", base, field)) +} + +fn valid_field_access_comparison() -> impl Strategy { + ( + valid_field_access_expr(), + prop::sample::select(vec![">", ">=", "<", "<="]), + prop_oneof![valid_int().prop_map(|n| n.to_string()), valid_ident(),], + ) + .prop_map(|(field, op, val)| format!("{} {} {}", field, op, val)) +} + +fn valid_transition_condition() -> impl Strategy { + prop_oneof![ + valid_ident(), // Simple identifier + valid_int().prop_map(|n| n.to_string()), // Literal int + Just("true".to_string()), // Boolean literal + Just("false".to_string()), + valid_comparison_expr(), // Comparison expression + valid_equality_expr(), // Equality expression + valid_logical_and_expr(), // Logical AND + valid_logical_or_expr(), // Logical OR + valid_logical_not_expr(), // Logical NOT + valid_field_access_expr(), // Field access + valid_field_access_comparison(), // Field access with comparison + ] +} + +fn valid_transition() -> impl Strategy { + (valid_transition_condition(), valid_ident()) + .prop_map(|(cond, target)| format!(" on {} -> {}", cond, target)) +} + +fn valid_arc_state() -> impl Strategy { + ( + valid_ident(), + prop::collection::vec(valid_transition(), 0..3), + ) + .prop_map(|(state_name, transitions)| { + let trans_str = transitions.join("\n"); + if transitions.is_empty() { + format!(" state {} {{}}", state_name) + } else { + format!(" state {} {{\n{}\n }}", state_name, trans_str) + } + }) +} + +fn valid_life_arc() -> impl Strategy { + ( + valid_ident(), + prop::collection::vec(valid_arc_state(), 
1..5), + ) + .prop_map(|(name, states)| { + let states_str = states.join("\n"); + format!("life_arc {} {{\n{}\n}}", name, states_str) + }) +} + +// ===== Behavior Tree generators ===== + +fn valid_action_node() -> impl Strategy { + ( + valid_ident(), + prop::option::of(prop::collection::vec(valid_field(), 0..3)), + ) + .prop_map(|(name, params)| match params { + | None => name, + | Some(params) if params.is_empty() => format!("{}()", name), + | Some(params) => { + let params_str = params + .iter() + .map(|(k, v)| format!("{}: {}", k, v)) + .collect::>() + .join(", "); + format!("{}({})", name, params_str) + }, + }) +} + +fn valid_behavior_node_depth(depth: u32) -> BoxedStrategy { + if depth == 0 { + // Base case: just actions or subtrees + prop_oneof![ + valid_action_node(), + valid_ident().prop_map(|name| format!("@{}", name)), + ] + .boxed() + } else { + // Recursive case: can be action, subtree, selector, or sequence + let action = valid_action_node(); + let subtree = valid_ident().prop_map(|name| format!("@{}", name)); + + let selector = prop::collection::vec(valid_behavior_node_depth(depth - 1), 1..3).prop_map( + |children| { + let children_str = children + .iter() + .map(|c| format!(" {}", c)) + .collect::>() + .join("\n"); + format!("? {{\n{}\n }}", children_str) + }, + ); + + let sequence = prop::collection::vec(valid_behavior_node_depth(depth - 1), 1..3).prop_map( + |children| { + let children_str = children + .iter() + .map(|c| format!(" {}", c)) + .collect::>() + .join("\n"); + format!("> {{\n{}\n }}", children_str) + }, + ); + + prop_oneof![action, subtree, selector, sequence,].boxed() + } +} + +fn valid_behavior_tree() -> impl Strategy { + ( + valid_ident(), + valid_behavior_node_depth(2), // Max depth 2 to keep tests fast + ) + .prop_map(|(name, root)| format!("behavior {} {{\n {}\n}}", name, root)) +} + +proptest! 
{ + #[test] + fn test_valid_life_arc_parses(input in valid_life_arc()) { + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse valid life_arc: {}\nError: {:?}", input, result.err()); + + if let Ok(file) = result { + assert_eq!(file.declarations.len(), 1); + match &file.declarations[0] { + crate::syntax::ast::Declaration::LifeArc(_) => {}, + _ => panic!("Expected LifeArc declaration"), + } + } + } + + #[test] + fn test_valid_behavior_tree_parses(input in valid_behavior_tree()) { + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse valid behavior: {}\nError: {:?}", input, result.err()); + + if let Ok(file) = result { + assert_eq!(file.declarations.len(), 1); + match &file.declarations[0] { + crate::syntax::ast::Declaration::Behavior(_) => {}, + _ => panic!("Expected Behavior declaration"), + } + } + } + + // ===== Comprehensive edge case tests ===== + + #[test] + fn test_life_arc_with_no_transitions(name in valid_ident(), state_name in valid_ident()) { + let input = format!("life_arc {} {{\n state {} {{}}\n}}", name, state_name); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse life arc with no transitions: {:?}", result.err()); + } + + #[test] + fn test_life_arc_with_multiple_transitions( + name in valid_ident(), + state_name in valid_ident(), + targets in prop::collection::vec(valid_ident(), 2..5) + ) { + let transitions = targets.iter() + .map(|target| format!(" on ready -> {}", target)) + .collect::>() + .join("\n"); + let input = format!("life_arc {} {{\n state {} {{\n{}\n }}\n}}", name, state_name, transitions); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse life arc with multiple 
transitions: {:?}", result.err()); + } + + #[test] + fn test_behavior_tree_deeply_nested(name in valid_ident()) { + let input = format!( + "behavior {} {{\n > {{\n ? {{\n > {{\n action\n }}\n }}\n }}\n}}", + name + ); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse deeply nested behavior tree: {:?}", result.err()); + } + + #[test] + fn test_behavior_tree_with_action_params( + name in valid_ident(), + action in valid_ident(), + params in prop::collection::vec(valid_field(), 1..4) + ) { + let params_str = params.iter() + .map(|(k, v)| format!("{}: {}", k, v)) + .collect::>() + .join(", "); + let input = format!("behavior {} {{\n {}({})\n}}", name, action, params_str); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse behavior with action params: {:?}", result.err()); + } + + #[test] + fn test_behavior_tree_with_subtree_reference( + name in valid_ident(), + subtree_path in prop::collection::vec(valid_ident(), 1..3) + ) { + let path = subtree_path.join("::"); + let input = format!("behavior {} {{\n @{}\n}}", name, path); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse behavior with subtree: {:?}", result.err()); + } + + #[test] + fn test_behavior_selector_with_many_children( + name in valid_ident(), + children in prop::collection::vec(valid_ident(), 2..10) + ) { + let children_str = children.iter() + .map(|c| format!(" {}", c)) + .collect::>() + .join("\n"); + let input = format!("behavior {} {{\n ? 
{{\n{}\n }}\n}}", name, children_str); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse selector with many children: {:?}", result.err()); + } + + #[test] + fn test_behavior_sequence_with_many_children( + name in valid_ident(), + children in prop::collection::vec(valid_ident(), 2..10) + ) { + let children_str = children.iter() + .map(|c| format!(" {}", c)) + .collect::>() + .join("\n"); + let input = format!("behavior {} {{\n > {{\n{}\n }}\n}}", name, children_str); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse sequence with many children: {:?}", result.err()); + } + + #[test] + fn test_life_arc_transition_with_literal_condition( + name in valid_ident(), + state_name in valid_ident(), + target in valid_ident(), + val in valid_int() + ) { + let input = format!("life_arc {} {{\n state {} {{\n on {} -> {}\n }}\n}}", name, state_name, val, target); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse life arc with literal condition: {:?}", result.err()); + } + + #[test] + fn test_life_arc_transition_with_bool_condition( + name in valid_ident(), + state_name in valid_ident(), + target in valid_ident(), + val in prop::sample::select(vec![true, false]) + ) { + let input = format!("life_arc {} {{\n state {} {{\n on {} -> {}\n }}\n}}", name, state_name, val, target); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse life arc with bool condition: {:?}", result.err()); + } + + // ===== Comparison expression tests ===== + + #[test] + fn test_comparison_all_operators( + ident in valid_ident(), + val in valid_int() + ) { + for op in &[">", ">=", "<", "<="] { + let comp = format!("{} {} {}", 
ident, op, val); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse comparison '{}': {:?}", comp, result.err()); + } + } + + #[test] + fn test_comparison_with_int(comp in valid_comparison_expr()) { + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse comparison '{}': {:?}", comp, result.err()); + } + + #[test] + fn test_comparison_gt(ident in valid_ident(), val in valid_int()) { + let comp = format!("{} > {}", ident, val); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse > comparison: {:?}", result.err()); + } + + #[test] + fn test_comparison_gte(ident in valid_ident(), val in valid_int()) { + let comp = format!("{} >= {}", ident, val); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse >= comparison: {:?}", result.err()); + } + + #[test] + fn test_comparison_lt(ident in valid_ident(), val in valid_int()) { + let comp = format!("{} < {}", ident, val); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse < comparison: {:?}", result.err()); + } + + #[test] + fn test_comparison_lte(ident in valid_ident(), val in valid_int()) { + let comp = format!("{} <= {}", ident, val); 
+ let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse <= comparison: {:?}", result.err()); + } + + #[test] + fn test_comparison_with_float( + ident in valid_ident(), + val in valid_float() + ) { + let comp = format!("{} > {:.2}", ident, val); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse float comparison: {:?}", result.err()); + } + + #[test] + fn test_comparison_with_negative_int( + ident in valid_ident(), + val in -100i64..0i64 + ) { + let comp = format!("{} < {}", ident, val); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse comparison with negative: {:?}", result.err()); + } + + #[test] + fn test_multiple_transitions_with_comparisons( + name in valid_ident(), + state_name in valid_ident(), + comparisons in prop::collection::vec( + (valid_ident(), prop::sample::select(vec![">", "<", ">=", "<="]), valid_int(), valid_ident()), + 2..5 + ) + ) { + let transitions = comparisons.iter() + .map(|(var, op, val, target)| format!(" on {} {} {} -> {}", var, op, val, target)) + .collect::>() + .join("\n"); + let input = format!("life_arc {} {{\n state {} {{\n{}\n }}\n}}", name, state_name, transitions); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse multiple comparison transitions: {:?}", result.err()); + } + + // ===== Equality expression tests ===== + + #[test] + fn test_equality_with_string( + ident in valid_ident(), + val in 
valid_string() + ) { + let comp = format!("{} is \"{}\"", ident, val); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse equality with string: {:?}", result.err()); + } + + #[test] + fn test_equality_with_int( + ident in valid_ident(), + val in valid_int() + ) { + let comp = format!("{} is {}", ident, val); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse equality with int: {:?}", result.err()); + } + + #[test] + fn test_equality_with_float( + ident in valid_ident(), + val in valid_float() + ) { + let comp = format!("{} is {:.2}", ident, val); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse equality with float: {:?}", result.err()); + } + + #[test] + fn test_equality_with_bool( + ident in valid_ident(), + val in prop::sample::select(vec![true, false]) + ) { + let comp = format!("{} is {}", ident, val); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", comp); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse equality with bool: {:?}", result.err()); + } + + #[test] + fn test_equality_multiple_transitions( + name in valid_ident(), + state_name in valid_ident(), + equalities in prop::collection::vec( + (valid_ident(), prop_oneof![ + valid_int().prop_map(|n| n.to_string()), + valid_string().prop_map(|s| format!("\"{}\"", s)), + Just("true".to_string()), + Just("false".to_string()), + ], 
valid_ident()), + 2..5 + ) + ) { + let transitions = equalities.iter() + .map(|(var, val, target)| format!(" on {} is {} -> {}", var, val, target)) + .collect::>() + .join("\n"); + let input = format!("life_arc {} {{\n state {} {{\n{}\n }}\n}}", name, state_name, transitions); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse multiple equality transitions: {:?}", result.err()); + } + + #[test] + fn test_equality_mixed_with_comparisons( + name in valid_ident(), + state_name in valid_ident() + ) { + let input = format!( + "life_arc {} {{\n state {} {{\n on age > 12 -> teen\n on status is active -> active_state\n on energy < 0.3 -> tired\n on completed is true -> done\n }}\n}}", + name, state_name + ); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse mixed equality and comparisons: {:?}", result.err()); + } + + // ===== Logical operator tests ===== + + #[test] + fn test_logical_and( + ident1 in valid_ident(), + ident2 in valid_ident(), + val1 in valid_int(), + val2 in valid_int() + ) { + let cond = format!("{} > {} and {} < {}", ident1, val1, ident2, val2); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse 'and' operator: {:?}", result.err()); + } + + #[test] + fn test_logical_or( + ident1 in valid_ident(), + ident2 in valid_ident(), + val1 in valid_int(), + val2 in valid_int() + ) { + let cond = format!("{} > {} or {} < {}", ident1, val1, ident2, val2); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse 'or' 
operator: {:?}", result.err()); + } + + #[test] + fn test_logical_not_with_identifier(ident in valid_ident()) { + let cond = format!("not {}", ident); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse 'not' operator with identifier: {:?}", result.err()); + } + + #[test] + fn test_logical_not_with_comparison( + ident in valid_ident(), + val in valid_int() + ) { + let cond = format!("not {} > {}", ident, val); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse 'not' with comparison: {:?}", result.err()); + } + + #[test] + fn test_and_with_equality( + ident1 in valid_ident(), + ident2 in valid_ident(), + val in valid_string() + ) { + let cond = format!("{} is true and {} is \"{}\"", ident1, ident2, val); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse 'and' with equality: {:?}", result.err()); + } + + #[test] + fn test_or_with_equality( + ident1 in valid_ident(), + ident2 in valid_ident() + ) { + let cond = format!("{} is false or {} is true", ident1, ident2); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse 'or' with equality: {:?}", result.err()); + } + + #[test] + fn test_chained_and( + ident1 in valid_ident(), + ident2 in valid_ident(), + ident3 in valid_ident(), + val1 in valid_int(), + val2 in valid_int(), + val3 in valid_int() + ) { + 
let cond = format!("{} > {} and {} < {} and {} is {}", ident1, val1, ident2, val2, ident3, val3); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse chained 'and': {:?}", result.err()); + } + + #[test] + fn test_chained_or( + ident1 in valid_ident(), + ident2 in valid_ident(), + ident3 in valid_ident() + ) { + let cond = format!("{} or {} or {}", ident1, ident2, ident3); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse chained 'or': {:?}", result.err()); + } + + #[test] + fn test_mixed_and_or( + ident1 in valid_ident(), + ident2 in valid_ident(), + ident3 in valid_ident(), + val1 in valid_int(), + val2 in valid_int() + ) { + // Tests precedence: 'and' binds tighter than 'or' + // This should parse as: (ident1 > val1 and ident2 < val2) or ident3 + let cond = format!("{} > {} and {} < {} or {}", ident1, val1, ident2, val2, ident3); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse mixed 'and'/'or': {:?}", result.err()); + } + + #[test] + fn test_not_not(ident in valid_ident()) { + let cond = format!("not not {}", ident); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse double 'not': {:?}", result.err()); + } + + #[test] + fn test_not_with_and( + ident1 in valid_ident(), + ident2 in valid_ident() + ) { + // Tests that 'not' binds tighter than 
'and' + // This should parse as: (not ident1) and ident2 + let cond = format!("not {} and {}", ident1, ident2); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse 'not' with 'and': {:?}", result.err()); + } + + #[test] + fn test_complex_nested_logic( + name in valid_ident(), + state_name in valid_ident() + ) { + let input = format!( + "life_arc {} {{\n state {} {{\n on age > 18 and status is active and energy > 0.5 -> state1\n on tired or hungry or sick -> state2\n on not ready and not completed -> state3\n on health > 50 and not sick or emergency -> state4\n }}\n}}", + name, state_name + ); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse complex nested logic: {:?}", result.err()); + } + + // ===== Field access tests ===== + + #[test] + fn test_field_access_self(field in valid_ident(), val in valid_int()) { + let cond = format!("self.{} > {}", field, val); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse self field access: {:?}", result.err()); + } + + #[test] + fn test_field_access_other(field in valid_ident(), val in valid_int()) { + let cond = format!("other.{} < {}", field, val); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse other field access: {:?}", result.err()); + } + + #[test] + fn test_field_access_with_equality( + field in valid_ident(), + val in valid_string() + ) { + let cond = format!("self.{} is \"{}\"", 
field, val); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse field access with equality: {:?}", result.err()); + } + + #[test] + fn test_field_access_with_bool(field in valid_ident()) { + let cond = format!("self.{} is true", field); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse field access with bool: {:?}", result.err()); + } + + #[test] + fn test_nested_field_access( + field1 in valid_ident(), + field2 in valid_ident(), + val in valid_int() + ) { + let cond = format!("self.{}.{} > {}", field1, field2, val); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse nested field access: {:?}", result.err()); + } + + #[test] + fn test_field_access_with_and( + field1 in valid_ident(), + field2 in valid_ident(), + val1 in valid_int(), + val2 in valid_int() + ) { + let cond = format!("self.{} > {} and other.{} < {}", field1, val1, field2, val2); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse field access with 'and': {:?}", result.err()); + } + + #[test] + fn test_field_access_with_or( + field1 in valid_ident(), + field2 in valid_ident() + ) { + let cond = format!("self.{} or other.{}", field1, field2); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = 
FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse field access with 'or': {:?}", result.err()); + } + + #[test] + fn test_field_access_with_not(field in valid_ident()) { + let cond = format!("not self.{}", field); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse field access with 'not': {:?}", result.err()); + } + + #[test] + fn test_field_access_both_sides( + field1 in valid_ident(), + field2 in valid_ident() + ) { + let cond = format!("self.{} > other.{}", field1, field2); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse field access on both sides: {:?}", result.err()); + } + + #[test] + fn test_field_access_vs_identifier( + field in valid_ident(), + ident in valid_ident() + ) { + let cond = format!("self.{} > {}", field, ident); + let input = format!("life_arc Test {{\n state s {{\n on {} -> next\n }}\n}}", cond); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse field access vs identifier: {:?}", result.err()); + } + + #[test] + fn test_complex_field_access( + name in valid_ident(), + state_name in valid_ident() + ) { + let input = format!( + "life_arc {} {{\n state {} {{\n on self.age > 18 and self.status is active -> state1\n on other.bond < 0.3 or self.energy < 0.2 -> state2\n on not self.ready and other.level > 5 -> state3\n on self.health > other.health -> state4\n }}\n}}", + name, state_name + ); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse complex 
field access: {:?}", result.err()); + } +} + +// ===== Prose block property tests ===== + +fn valid_prose_content() -> impl Strategy { + // Prose content without --- at line start + prop::string::string_regex("[^\n]*(\n[^-][^\n]*)*").unwrap() +} + +proptest! { + #[test] + fn test_prose_block_roundtrip( + tag in valid_ident(), + content in valid_prose_content() + ) { + let input = format!("---{}\n{}\n---", tag, content); + let lexer = Lexer::new(&input); + let tokens: Vec = lexer.map(|(_, tok, _)| tok).collect(); + + assert_eq!(tokens.len(), 1); + match &tokens[0] { + Token::ProseBlock(pb) => { + assert_eq!(pb.tag, tag); + assert_eq!(pb.content.trim(), content.trim()); + } + _ => panic!("Expected ProseBlock token"), + } + } + + #[test] + fn test_character_with_prose( + name in valid_ident(), + tag in valid_ident(), + content in valid_prose_content() + ) { + let input = format!( + "character {} {{\n {}: ---{}\n{}\n---\n}}", + name, tag, tag, content + ); + + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + + assert!(result.is_ok(), "Failed to parse character with prose:\n{}\nError: {:?}", input, result.err()); + } +} + +// ===== Edge case tests ===== + +#[cfg(test)] +mod edge_cases { + use super::*; + + #[test] + fn test_empty_input_parses() { + let lexer = Lexer::new(""); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok()); + assert_eq!(result.unwrap().declarations.len(), 0); + } + + proptest! 
{ + + #[test] + fn test_only_whitespace_parses(ws in "[ \t\n]{1,100}") { + let lexer = Lexer::new(&ws); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok()); + assert_eq!(result.unwrap().declarations.len(), 0); + } + + #[test] + fn test_only_comments_parses( + n in 1usize..10, + comment_content in valid_string() + ) { + let input = (0..n) + .map(|_| format!("// {}", comment_content)) + .collect::>() + .join("\n"); + + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok()); + assert_eq!(result.unwrap().declarations.len(), 0); + } + + #[test] + fn test_unicode_in_strings(s in "[^\"\\\\ ]{1,20}") { + let input = format!("character Test {{ name: \"{}\" }}", s); + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + // Should either parse or fail gracefully + let _ = result; + } + + #[test] + fn test_nested_objects(depth in 1usize..4) { + let mut input = String::from("character Test { data: "); + for _ in 0..depth { + input.push_str("{ inner: "); + } + input.push_str("42"); + for _ in 0..depth { + input.push_str(" }"); + } + input.push_str(" }"); + + let lexer = Lexer::new(&input); + let parser = FileParser::new(); + let result = parser.parse(lexer); + assert!(result.is_ok(), "Failed to parse nested objects (depth {}): {}", depth, input); + } + } +} diff --git a/src/types.rs b/src/types.rs new file mode 100644 index 0000000..81b46ab --- /dev/null +++ b/src/types.rs @@ -0,0 +1,228 @@ +//! Public types for resolved Storybook entities +//! +//! These types represent fully resolved, validated entities after the +//! resolution pipeline completes. Unlike the AST types which represent +//! raw parsed syntax, these types: +//! - Have all cross-references resolved +//! - Have all overrides applied +//! - Have passed semantic validation +//! 
- Are ready for consumption by the game engine + +use std::collections::HashMap; + +use crate::syntax::ast::{ + BehaviorNode, + Participant, + ProseBlock, + Span, + Time, + Transition, + Value, +}; + +/// A fully resolved Storybook project +#[derive(Debug, Clone)] +pub struct ResolvedFile { + pub declarations: Vec, +} + +/// A resolved top-level declaration +#[derive(Debug, Clone)] +pub enum ResolvedDeclaration { + Character(ResolvedCharacter), + Template(ResolvedTemplate), + LifeArc(ResolvedLifeArc), + Schedule(ResolvedSchedule), + Behavior(ResolvedBehavior), + Institution(ResolvedInstitution), + Relationship(ResolvedRelationship), + Location(ResolvedLocation), + Species(ResolvedSpecies), + Enum(ResolvedEnum), +} + +/// A character with all templates applied and references resolved +#[derive(Debug, Clone, PartialEq)] +pub struct ResolvedCharacter { + pub name: String, + pub fields: HashMap, + pub prose_blocks: HashMap, + pub span: Span, +} + +/// A template definition (before instantiation) +#[derive(Debug, Clone, PartialEq)] +pub struct ResolvedTemplate { + pub name: String, + pub fields: HashMap, + pub span: Span, +} + +/// A life arc with validated state transitions +#[derive(Debug, Clone, PartialEq)] +pub struct ResolvedLifeArc { + pub name: String, + pub states: Vec, + pub span: Span, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct ResolvedArcState { + pub name: String, + pub transitions: Vec, + pub span: Span, +} + +/// A schedule with validated non-overlapping blocks +#[derive(Debug, Clone, PartialEq)] +pub struct ResolvedSchedule { + pub name: String, + pub blocks: Vec, + pub span: Span, +} + +#[derive(Debug, Clone, PartialEq)] +pub struct ResolvedScheduleBlock { + pub activity: String, + pub start: Time, + pub end: Time, + pub span: Span, +} + +/// A behavior tree with validated actions +#[derive(Debug, Clone, PartialEq)] +pub struct ResolvedBehavior { + pub name: String, + pub root: BehaviorNode, + pub span: Span, +} + +/// An institution with 
resolved member references +#[derive(Debug, Clone, PartialEq)] +pub struct ResolvedInstitution { + pub name: String, + pub fields: HashMap, + pub span: Span, +} + +/// A bidirectional relationship with merged self/other blocks +#[derive(Debug, Clone, PartialEq)] +pub struct ResolvedRelationship { + pub name: String, + pub participants: Vec, + pub fields: HashMap, + pub span: Span, +} + +/// A location definition +#[derive(Debug, Clone, PartialEq)] +pub struct ResolvedLocation { + pub name: String, + pub fields: HashMap, + pub span: Span, +} + +/// A species definition +#[derive(Debug, Clone, PartialEq)] +pub struct ResolvedSpecies { + pub name: String, + pub fields: HashMap, + pub span: Span, +} + +/// An enum definition with variants +#[derive(Debug, Clone, PartialEq)] +pub struct ResolvedEnum { + pub name: String, + pub variants: Vec, + pub span: Span, +} + +impl ResolvedFile { + /// Get all characters in the file + pub fn characters(&self) -> impl Iterator { + self.declarations.iter().filter_map(|decl| match decl { + | ResolvedDeclaration::Character(c) => Some(c), + | _ => None, + }) + } + + /// Get all relationships in the file + pub fn relationships(&self) -> impl Iterator { + self.declarations.iter().filter_map(|decl| match decl { + | ResolvedDeclaration::Relationship(r) => Some(r), + | _ => None, + }) + } + + /// Get all institutions in the file + pub fn institutions(&self) -> impl Iterator { + self.declarations.iter().filter_map(|decl| match decl { + | ResolvedDeclaration::Institution(i) => Some(i), + | _ => None, + }) + } + + /// Get all schedules in the file + pub fn schedules(&self) -> impl Iterator { + self.declarations.iter().filter_map(|decl| match decl { + | ResolvedDeclaration::Schedule(s) => Some(s), + | _ => None, + }) + } + + /// Get all behavior trees in the file + pub fn behaviors(&self) -> impl Iterator { + self.declarations.iter().filter_map(|decl| match decl { + | ResolvedDeclaration::Behavior(b) => Some(b), + | _ => None, + }) + } + + /// 
Get all life arcs in the file + pub fn life_arcs(&self) -> impl Iterator { + self.declarations.iter().filter_map(|decl| match decl { + | ResolvedDeclaration::LifeArc(la) => Some(la), + | _ => None, + }) + } + + /// Get all locations in the file + pub fn locations(&self) -> impl Iterator { + self.declarations.iter().filter_map(|decl| match decl { + | ResolvedDeclaration::Location(l) => Some(l), + | _ => None, + }) + } + + /// Get all species in the file + pub fn species(&self) -> impl Iterator { + self.declarations.iter().filter_map(|decl| match decl { + | ResolvedDeclaration::Species(s) => Some(s), + | _ => None, + }) + } + + /// Get all enums in the file + pub fn enums(&self) -> impl Iterator { + self.declarations.iter().filter_map(|decl| match decl { + | ResolvedDeclaration::Enum(e) => Some(e), + | _ => None, + }) + } + + /// Find a character by name + pub fn find_character(&self, name: &str) -> Option<&ResolvedCharacter> { + self.characters().find(|c| c.name == name) + } + + /// Find a relationship by name + pub fn find_relationship(&self, name: &str) -> Option<&ResolvedRelationship> { + self.relationships().find(|r| r.name == name) + } + + /// Find an institution by name + pub fn find_institution(&self, name: &str) -> Option<&ResolvedInstitution> { + self.institutions().find(|i| i.name == name) + } +} diff --git a/tests/cli_integration.rs b/tests/cli_integration.rs new file mode 100644 index 0000000..f50864d --- /dev/null +++ b/tests/cli_integration.rs @@ -0,0 +1,546 @@ +//! Integration tests for the CLI tool +//! +//! These tests verify that the `sb` command-line tool works correctly +//! by testing it against real project files. 
+ +use std::{ + fs, + path::PathBuf, + process::Command, +}; + +use tempfile::TempDir; + +/// Helper to get the path to the compiled sb binary +fn sb_binary() -> PathBuf { + let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + path.push("target"); + path.push("debug"); + path.push("sb"); + path +} + +/// Helper to create a temporary test project +fn create_test_project() -> TempDir { + let dir = TempDir::new().unwrap(); + + // Create a valid character file + fs::write( + dir.path().join("test.sb"), + r#" +character Martha { + age: 34 + trust: 0.8 +} + +character David { + age: 42 + health: 0.9 +} +"#, + ) + .unwrap(); + + dir +} + +/// Helper to create a project with errors +fn create_invalid_project() -> TempDir { + let dir = TempDir::new().unwrap(); + + fs::write( + dir.path().join("errors.sb"), + r#" +character Martha { + age: 200 + trust: 1.5 +} +"#, + ) + .unwrap(); + + dir +} + +#[test] +fn test_validate_valid_project() { + let project = create_test_project(); + + let output = Command::new(sb_binary()) + .arg("validate") + .arg(project.path()) + .output() + .expect("Failed to execute sb validate"); + + assert!( + output.status.success(), + "Validation should succeed for valid project. 
Stderr: {}", + String::from_utf8_lossy(&output.stderr) + ); + + let stdout = String::from_utf8_lossy(&output.stdout); + assert!(stdout.contains("✓ Validation successful")); + assert!(stdout.contains("Characters: 2")); +} + +#[test] +fn test_validate_invalid_project() { + let project = create_invalid_project(); + + let output = Command::new(sb_binary()) + .arg("validate") + .arg(project.path()) + .output() + .expect("Failed to execute sb validate"); + + assert!( + !output.status.success(), + "Validation should fail for invalid project" + ); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!(stderr.contains("Found 2 errors")); + assert!(stderr.contains("Trait 'age' has value 200")); + assert!(stderr.contains("Trait 'trust' has value 1.5")); +} + +#[test] +fn test_validate_single_file() { + let project = create_test_project(); + let file_path = project.path().join("test.sb"); + + let output = Command::new(sb_binary()) + .arg("validate") + .arg(&file_path) + .output() + .expect("Failed to execute sb validate"); + + assert!( + output.status.success(), + "Validation should succeed for valid file" + ); + + let stdout = String::from_utf8_lossy(&output.stdout); + assert!(stdout.contains("✓ Validation successful")); +} + +#[test] +fn test_validate_nonexistent_path() { + let output = Command::new(sb_binary()) + .arg("validate") + .arg("/nonexistent/path/to/project") + .output() + .expect("Failed to execute sb validate"); + + assert!(!output.status.success(), "Should fail for nonexistent path"); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!(stderr.contains("Path does not exist")); +} + +#[test] +fn test_inspect_character() { + let project = create_test_project(); + + let output = Command::new(sb_binary()) + .arg("inspect") + .arg("Martha") + .arg("--path") + .arg(project.path()) + .output() + .expect("Failed to execute sb inspect"); + + assert!(output.status.success(), "Inspect should succeed"); + + let stdout = 
String::from_utf8_lossy(&output.stdout); + assert!(stdout.contains("Character: Martha")); + assert!(stdout.contains("age")); + assert!(stdout.contains("34")); +} + +#[test] +fn test_inspect_nonexistent_entity() { + let project = create_test_project(); + + let output = Command::new(sb_binary()) + .arg("inspect") + .arg("NonExistent") + .arg("--path") + .arg(project.path()) + .output() + .expect("Failed to execute sb inspect"); + + assert!(output.status.success(), "Inspect runs even if not found"); + + let stdout = String::from_utf8_lossy(&output.stdout); + assert!(stdout.contains("not found")); +} + +#[test] +fn test_validate_empty_project() { + let dir = TempDir::new().unwrap(); + + let output = Command::new(sb_binary()) + .arg("validate") + .arg(dir.path()) + .output() + .expect("Failed to execute sb validate"); + + assert!(!output.status.success(), "Should fail for empty project"); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!(stderr.contains("No .sb files found")); +} + +#[test] +fn test_validate_shows_multiple_errors() { + let dir = TempDir::new().unwrap(); + + fs::write( + dir.path().join("multi_error.sb"), + r#" +character Alice { + age: 200 + trust: 1.5 + bond: -0.2 +} + +character Bob { + age: -10 + love: 3.0 +} +"#, + ) + .unwrap(); + + let output = Command::new(sb_binary()) + .arg("validate") + .arg(dir.path()) + .output() + .expect("Failed to execute sb validate"); + + assert!( + !output.status.success(), + "Should fail with validation errors" + ); + + let stderr = String::from_utf8_lossy(&output.stderr); + + // Should show all 5 errors (non-fail-fast) + assert!(stderr.contains("Found 5 errors")); + assert!(stderr.contains("age")); // age: 200 + assert!(stderr.contains("trust")); // trust: 1.5 + assert!(stderr.contains("bond")); // bond: -0.2 + assert!(stderr.contains("-10")); // age: -10 + assert!(stderr.contains("love")); // love: 3.0 +} + +#[test] +fn test_cross_file_name_resolution() { + let dir = TempDir::new().unwrap(); + + // 
Create multiple files with characters + fs::write( + dir.path().join("file1.sb"), + r#" +character Martha { + age: 34 + trust: 0.8 +} +"#, + ) + .unwrap(); + + fs::write( + dir.path().join("file2.sb"), + r#" +character David { + age: 42 + health: 0.9 +} +"#, + ) + .unwrap(); + + let output = Command::new(sb_binary()) + .arg("validate") + .arg(dir.path()) + .output() + .expect("Failed to execute sb validate"); + + assert!( + output.status.success(), + "Should successfully load and validate multiple files" + ); + + let stdout = String::from_utf8_lossy(&output.stdout); + assert!(stdout.contains("✓ Validation successful")); + assert!(stdout.contains("Characters: 2")); +} + +#[test] +fn test_cross_file_duplicate_detection() { + let dir = TempDir::new().unwrap(); + + // Create two files with the same character name + fs::write( + dir.path().join("file1.sb"), + r#" +character Martha { + age: 34 +} +"#, + ) + .unwrap(); + + fs::write( + dir.path().join("file2.sb"), + r#" +character Martha { + age: 42 +} +"#, + ) + .unwrap(); + + let output = Command::new(sb_binary()) + .arg("validate") + .arg(dir.path()) + .output() + .expect("Failed to execute sb validate"); + + assert!( + !output.status.success(), + "Should fail with duplicate definition error" + ); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!(stderr.contains("Duplicate definition of 'Martha'")); +} + +// ===== Template Composition Tests ===== + +#[test] +fn test_template_composition_with_includes() { + let dir = TempDir::new().unwrap(); + + fs::write( + dir.path().join("templates.sb"), + r#" +// Base template +template Being { + alive: true +} + +// Template that includes Being +template Human { + include Being + kind: "human" +} + +// Character with template composition +character Martha from Human { + firstName: "Martha" + age: 34 +} +"#, + ) + .unwrap(); + + let output = Command::new(sb_binary()) + .arg("validate") + .arg(dir.path()) + .output() + .expect("Failed to execute sb validate"); + + 
assert!( + output.status.success(), + "Template composition with includes should succeed" + ); + + let stdout = String::from_utf8_lossy(&output.stdout); + assert!(stdout.contains("✓ Validation successful!")); + assert!(stdout.contains("Characters: 1")); +} + +#[test] +fn test_template_composition_multiple_templates() { + let dir = TempDir::new().unwrap(); + + fs::write( + dir.path().join("templates.sb"), + r#" +template Physical { + height: 0 + weight: 0 +} + +template Mental { + iq: 0 +} + +character David from Physical, Mental { + height: 180 + weight: 75 + iq: 120 + firstName: "David" +} +"#, + ) + .unwrap(); + + let output = Command::new(sb_binary()) + .arg("validate") + .arg(dir.path()) + .output() + .expect("Failed to execute sb validate"); + + assert!( + output.status.success(), + "Multiple template inheritance should succeed" + ); + + let stdout = String::from_utf8_lossy(&output.stdout); + assert!(stdout.contains("✓ Validation successful!")); +} + +#[test] +fn test_strict_template_validation_success() { + let dir = TempDir::new().unwrap(); + + fs::write( + dir.path().join("strict.sb"), + r#" +template Person strict { + age: 18..100 + firstName: "" +} + +character Martha from Person { + age: 34 + firstName: "Martha" +} +"#, + ) + .unwrap(); + + let output = Command::new(sb_binary()) + .arg("validate") + .arg(dir.path()) + .output() + .expect("Failed to execute sb validate"); + + assert!( + output.status.success(), + "Strict template with concrete values should succeed" + ); +} + +#[test] +fn test_strict_template_validation_failure() { + let dir = TempDir::new().unwrap(); + + fs::write( + dir.path().join("strict_fail.sb"), + r#" +template Person strict { + age: 18..100 +} + +character Martha from Person { + firstName: "Martha" +} +"#, + ) + .unwrap(); + + let output = Command::new(sb_binary()) + .arg("validate") + .arg(dir.path()) + .output() + .expect("Failed to execute sb validate"); + + assert!( + !output.status.success(), + "Strict template with range 
value should fail" + ); + + let stderr = String::from_utf8_lossy(&output.stderr); + assert!(stderr.contains("strict template")); + assert!(stderr.contains("range value")); +} + +#[test] +fn test_template_chained_includes() { + let dir = TempDir::new().unwrap(); + + fs::write( + dir.path().join("chained.sb"), + r#" +template Being { + alive: true +} + +template Human { + include Being + kind: "human" +} + +template Person strict { + include Human + age: 18..100 +} + +character Martha from Person { + age: 34 + firstName: "Martha" +} +"#, + ) + .unwrap(); + + let output = Command::new(sb_binary()) + .arg("validate") + .arg(dir.path()) + .output() + .expect("Failed to execute sb validate"); + + assert!( + output.status.success(), + "Chained template includes should succeed" + ); + + let stdout = String::from_utf8_lossy(&output.stdout); + assert!(stdout.contains("✓ Validation successful!")); +} + +#[test] +fn test_reserved_keyword_field_name_fails_at_parse() { + let dir = TempDir::new().unwrap(); + + fs::write( + dir.path().join("reserved.sb"), + r#" +character Martha { + species: "human" + age: 34 +} +"#, + ) + .unwrap(); + + let output = Command::new(sb_binary()) + .arg("validate") + .arg(dir.path()) + .output() + .expect("Failed to execute sb validate"); + + assert!( + !output.status.success(), + "Field with reserved keyword name should fail at parse time" + ); + + let stderr = String::from_utf8_lossy(&output.stderr); + // Parser catches this as UnrecognizedToken before validation + assert!(stderr.contains("Parse error") || stderr.contains("UnrecognizedToken")); +} diff --git a/tests/compiler_errors/00_multiple_errors.sb b/tests/compiler_errors/00_multiple_errors.sb new file mode 100644 index 0000000..182a5a0 --- /dev/null +++ b/tests/compiler_errors/00_multiple_errors.sb @@ -0,0 +1,30 @@ +// Multiple errors in one file +// This demonstrates non-fail-fast error collection - all errors reported at once! 
+ +character Martha { + age: 200 + trust: 1.5 + bond: -0.3 +} + +character David { + age: -5 + love: 2.0 +} + +life_arc Growth { + state child { + on age > 18 -> adult + } + + state adult { + on age > 65 -> senior + on retired -> elderly + } +} + +schedule BadSchedule { + 08:00 -> 12:00: work + 11:30 -> 13:00: lunch + 12:30 -> 17:00: more_work +} diff --git a/tests/compiler_errors/01_unexpected_token.sb b/tests/compiler_errors/01_unexpected_token.sb new file mode 100644 index 0000000..d48c731 --- /dev/null +++ b/tests/compiler_errors/01_unexpected_token.sb @@ -0,0 +1,6 @@ +// Error: Missing colon after field name +// This demonstrates the UnexpectedToken parse error + +character Martha { + age 34 +} diff --git a/tests/compiler_errors/05_trait_out_of_range.sb b/tests/compiler_errors/05_trait_out_of_range.sb new file mode 100644 index 0000000..1cbb278 --- /dev/null +++ b/tests/compiler_errors/05_trait_out_of_range.sb @@ -0,0 +1,8 @@ +// Error: Trait value outside valid range +// Demonstrates TraitOutOfRange validation error + +character Martha { + age: 34 + trust: 1.5 + bond: -0.2 +} diff --git a/tests/compiler_errors/07_unknown_life_arc_state.sb b/tests/compiler_errors/07_unknown_life_arc_state.sb new file mode 100644 index 0000000..3712a91 --- /dev/null +++ b/tests/compiler_errors/07_unknown_life_arc_state.sb @@ -0,0 +1,12 @@ +// Error: Transition to undefined state +// Demonstrates UnknownLifeArcState validation error + +life_arc Growth { + state child { + on age > 18 -> adult + } + + state adult { + on age > 65 -> senior + } +} diff --git a/tests/compiler_errors/08_schedule_overlap.sb b/tests/compiler_errors/08_schedule_overlap.sb new file mode 100644 index 0000000..98a6970 --- /dev/null +++ b/tests/compiler_errors/08_schedule_overlap.sb @@ -0,0 +1,8 @@ +// Error: Schedule blocks overlap in time +// Demonstrates ScheduleOverlap validation error + +schedule DailyRoutine { + 08:00 -> 12:30: work + 12:00 -> 13:00: lunch + 13:00 -> 17:00: work +} diff --git 
a/tests/compiler_errors/README.md b/tests/compiler_errors/README.md
new file mode 100644
index 0000000..ce104ad
--- /dev/null
+++ b/tests/compiler_errors/README.md
@@ -0,0 +1,55 @@
+# Compiler Error Examples
+
+This directory contains example `.sb` files that demonstrate each type of error
+the Storybook compiler can detect. Each file is intentionally incorrect to showcase
+the error messages and helpful hints.
+
+## How to Run
+
+To see all error messages, validate each file individually:
+
+```bash
+# From the storybook root directory
+cargo build --release
+
+# Run each file to see its error
+./target/release/sb validate tests/compiler_errors/01_unexpected_token.sb
+./target/release/sb validate tests/compiler_errors/02_unexpected_eof.sb
+./target/release/sb validate tests/compiler_errors/03_invalid_token.sb
+# ... etc
+```
+
+Or use this script to show all errors:
+
+```bash
+#!/bin/bash
+for file in tests/compiler_errors/*.sb; do
+  echo "═══════════════════════════════════════════════════════════"
+  echo "File: $(basename "$file")"
+  echo "═══════════════════════════════════════════════════════════"
+  cargo run --bin sb -- validate "$file" 2>&1 || true
+  echo ""
+done
+```
+
+## Error Categories
+
+### Parse Errors (Syntax)
+- `01_unexpected_token.sb` - Missing colon after field name
+- `02_unexpected_eof.sb` - Incomplete declaration
+- `03_invalid_token.sb` - Invalid character in syntax
+- `04_unclosed_prose.sb` - Prose block missing closing `---`
+
+### Validation Errors (Semantics)
+- `05_trait_out_of_range.sb` - Trait value outside 0.0-1.0 range
+- `06_age_out_of_range.sb` - Age value outside 0-150 range
+- `07_unknown_life_arc_state.sb` - Transition to undefined state
+- `08_schedule_overlap.sb` - Schedule blocks overlap in time
+- `09_unknown_behavior_action.sb` - Undefined behavior tree action
+- `10_duplicate_field.sb` - Same field name used twice
+- `11_relationship_bond_out_of_range.sb` - Bond value outside 0.0-1.0 range
+
+Each error includes:
+- ✓ 
Clear error message explaining what went wrong
+- ✓ Helpful hint on how to fix it
+- ✓ Context-specific suggestions
diff --git a/tests/compiler_errors/run_examples.sh b/tests/compiler_errors/run_examples.sh
new file mode 100755
index 0000000..5bf28cb
--- /dev/null
+++ b/tests/compiler_errors/run_examples.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+# Script to run all compiler error examples and see the error messages
+
+cd "$(dirname "$0")/../.."
+
+echo "════════════════════════════════════════════════════════════════"
+echo "STORYBOOK COMPILER ERRORS - EXAMPLES"
+echo "════════════════════════════════════════════════════════════════"
+echo ""
+
+for file in tests/compiler_errors/*.sb; do
+  if [ -f "$file" ]; then
+    echo "═══════════════════════════════════════════════════════════════════"
+    echo "File: $(basename "$file")"
+    echo "═══════════════════════════════════════════════════════════════════"
+    head -3 "$file" | tail -2 # Show the comment lines
+    echo ""
+    cargo run --quiet --bin sb -- validate "$file" 2>&1 || true
+    echo ""
+    echo ""
+  fi
+done
+
+echo "════════════════════════════════════════════════════════════════"
+echo "ALL EXAMPLES COMPLETE"
+echo "════════════════════════════════════════════════════════════════"
diff --git a/tests/examples/all_types.sb b/tests/examples/all_types.sb
new file mode 100644
index 0000000..a57a48f
--- /dev/null
+++ b/tests/examples/all_types.sb
@@ -0,0 +1,54 @@
+// Test all static types
+
+character Martha {
+  age: 34
+  name: "Martha Baker"
+}
+
+template GenericPerson {
+  age: 20..60
+  energy: 0.5..1.0
+}
+
+enum BondType {
+  romantic,
+  familial,
+  friendship
+}
+
+institution Bakery {
+  name: "Martha's Bakery"
+  address: downtown
+  capacity: 10
+}
+
+location Downtown {
+  name: "Downtown District"
+  population: 50000
+}
+
+species Human {
+  lifespan: 80
+  intelligence: high
+}
+
+schedule DailyRoutine {
+  06:00 -> 07:00: wake_up
+  07:00 -> 09:00: breakfast
+  09:00 -> 17:00: work
+  17:00 -> 18:00: dinner
+  22:00 -> 06:00: 
sleep +} + +relationship Spousal { + Martha + David + bond: 0.9 + relationship_type: romantic +} + +relationship ParentChild { + Martha as parent + Tommy as child + bond: 1.0 +} diff --git a/tests/examples/behavior_and_lifearc.sb b/tests/examples/behavior_and_lifearc.sb new file mode 100644 index 0000000..073ebf8 --- /dev/null +++ b/tests/examples/behavior_and_lifearc.sb @@ -0,0 +1,50 @@ +// Test behavior trees and life arcs + +life_arc Childhood { + state infant { + on ready -> toddler + } + state toddler { + on ready -> child + } + state child { + on ready -> teen + } +} + +behavior SimpleBehavior { + walk_around +} + +behavior SequenceBehavior { + > { + check_energy + move_to_location + perform_action + } +} + +behavior SelectorBehavior { + ? { + try_option_a + try_option_b + fallback + } +} + +behavior NestedBehavior { + > { + ? { + check_condition_a + check_condition_b + } + perform_action + } +} + +behavior WithSubtree { + > { + @helpers::check_preconditions + main_action + } +} diff --git a/tests/examples/bidirectional_relationships.sb b/tests/examples/bidirectional_relationships.sb new file mode 100644 index 0000000..f3f0801 --- /dev/null +++ b/tests/examples/bidirectional_relationships.sb @@ -0,0 +1,68 @@ +// Test bidirectional relationship resolution +// Relationships can be declared from either participant's perspective + +// Simple relationship with no self/other blocks +relationship Friendship { + Alice + Bob + bond: 0.8 + years_known: 5 +} + +// Relationship with roles +relationship Marriage { + Martha as spouse + David as spouse + bond: 0.9 + anniversary: "2015-06-20" +} + +// Relationship with self/other blocks from one perspective +relationship ParentChild { + Martha as parent self { + responsibility: 1.0 + protective: 0.9 + } other { + dependent: 0.8 + } + Tommy as child +} + +// Asymmetric relationship - different roles +relationship EmployerEmployee { + Martha as employer self { + authority: 0.9 + } other { + respect: 0.8 + } + Elena as employee 
+} + +// Complex relationship with shared and participant-specific fields +relationship RomanticPartnership { + Alice as partner self { + love: 0.95 + trust: 0.9 + } other { + attraction: 0.85 + respect: 0.95 + } + Charlie as partner + + // Shared fields + commitment: 0.85 + compatibility: 0.9 +} + +// Multiple relationships between same people with different names +relationship Friendship2 { + Alice + Charlie + bond: 0.7 +} + +relationship Coworkers { + Alice + Charlie + workplace: "TechCorp" +} diff --git a/tests/examples/comparisons.sb b/tests/examples/comparisons.sb new file mode 100644 index 0000000..837d3db --- /dev/null +++ b/tests/examples/comparisons.sb @@ -0,0 +1,34 @@ +// Test comparison expressions in life arcs + +life_arc AgeProgression { + state child { + on age > 12 -> teen + } + state teen { + on age >= 18 -> adult + } + state adult { + on age > 65 -> senior + } +} + +life_arc EnergyStates { + state rested { + on energy < 0.3 -> tired + } + state tired { + on energy <= 0.1 -> exhausted + } + state exhausted { + on energy >= 0.5 -> rested + } +} + +life_arc HealthStates { + state healthy { + on health < 50 -> sick + } + state sick { + on health >= 80 -> healthy + } +} diff --git a/tests/examples/equality.sb b/tests/examples/equality.sb new file mode 100644 index 0000000..86d42ba --- /dev/null +++ b/tests/examples/equality.sb @@ -0,0 +1,40 @@ +// Test equality expressions in life arcs + +life_arc NameCheck { + state checking { + on name is "Alice" -> found_alice + on name is "Bob" -> found_bob + } + state found_alice { + on ready -> checking + } + state found_bob { + on ready -> checking + } +} + +life_arc StatusCheck { + state monitoring { + on status is active -> active_state + on status is inactive -> inactive_state + } + state active_state { + on status is inactive -> inactive_state + } + state inactive_state { + on status is active -> active_state + } +} + +life_arc FlagCheck { + state idle { + on completed is true -> done + on completed is false 
-> working + } + state working { + on completed is true -> done + } + state done { + on completed is false -> working + } +} diff --git a/tests/examples/field_access.sb b/tests/examples/field_access.sb new file mode 100644 index 0000000..8bab844 --- /dev/null +++ b/tests/examples/field_access.sb @@ -0,0 +1,113 @@ +// Test field access in relationship contexts + +relationship Marriage { + PersonA as spouse + PersonB as spouse + + self { + bond: 0.8 + } + other { + bond: 0.8 + } +} + +life_arc RelationshipDynamics { + state stable { + // Field access with comparisons + on self.bond < 0.3 -> troubled + on other.bond < 0.3 -> troubled + on self.bond > 0.9 and other.bond > 0.9 -> thriving + } + + state troubled { + on self.bond > 0.7 and other.bond > 0.7 -> stable + on self.bond < 0.1 or other.bond < 0.1 -> broken + } + + state thriving { + on self.bond < 0.8 or other.bond < 0.8 -> stable + } + + state broken { + on self.bond > 0.5 and other.bond > 0.5 -> troubled + } +} + +life_arc CharacterStates { + state monitoring { + // Field access with self + on self.age > 18 -> adult + on self.energy < 0.2 -> exhausted + on self.health < 30 -> sick + + // Field access with equality + on self.status is active -> active_state + on self.ready is true -> ready_state + } + + state adult { + on self.age < 18 -> monitoring + } + + state exhausted { + on self.energy > 0.7 -> monitoring + } + + state sick { + on self.health > 80 -> monitoring + } + + state active_state { + on self.status is inactive -> monitoring + } + + state ready_state { + on self.ready is false -> monitoring + } +} + +life_arc ComplexFieldAccess { + state checking { + // Nested field access patterns + on self.stats.health > 50 -> healthy + on other.profile.age < 18 -> young_other + + // Field access with logical operators + on self.energy > 0.5 and self.health > 70 -> strong + on not self.ready -> waiting + on self.completed is true or other.completed is true -> done + + // Mixed field access and regular identifiers 
+ on self.score > threshold -> passed + on other.level is beginner and difficulty > 5 -> too_hard + } + + state healthy { + on self.stats.health < 30 -> checking + } + + state young_other { + on other.profile.age >= 18 -> checking + } + + state strong { + on self.energy < 0.3 or self.health < 50 -> checking + } + + state waiting { + on self.ready -> checking + } + + state done { + on self.completed is false and other.completed is false -> checking + } + + state passed { + on self.score < threshold -> checking + } + + state too_hard { + on other.level is advanced or difficulty < 3 -> checking + } +} diff --git a/tests/examples/logical_operators.sb b/tests/examples/logical_operators.sb new file mode 100644 index 0000000..39c5adb --- /dev/null +++ b/tests/examples/logical_operators.sb @@ -0,0 +1,95 @@ +// Test logical operators in life arc transitions + +life_arc ComplexConditions { + state monitoring { + // AND operator + on age > 18 and status is active -> adult_active + on energy > 0.5 and health > 80 -> healthy_energetic + + // OR operator + on tired or hungry -> needs_rest + on age < 5 or age > 65 -> dependent + + // NOT operator + on not ready -> waiting + on not completed -> in_progress + } + + state adult_active { + on age < 18 or status is inactive -> monitoring + } + + state healthy_energetic { + on energy < 0.3 or health < 50 -> monitoring + } + + state needs_rest { + on not tired and not hungry -> monitoring + } + + state dependent { + on age >= 5 and age <= 65 -> monitoring + } + + state waiting { + on ready -> monitoring + } + + state in_progress { + on completed -> monitoring + } +} + +life_arc NestedLogic { + state checking { + // Complex nested conditions + on age > 18 and status is active and energy > 0.5 -> triple_and + on tired or hungry or sick -> any_problem + on not ready and not completed -> both_false + + // Mixed operators + on age > 21 and status is verified or is_admin -> allowed + on health > 50 and not sick or emergency -> proceed + } + + 
state triple_and { + on age < 18 or status is inactive or energy < 0.5 -> checking + } + + state any_problem { + on not tired and not hungry and not sick -> checking + } + + state both_false { + on ready or completed -> checking + } + + state allowed { + on age < 21 and status is unverified and not is_admin -> checking + } + + state proceed { + on health < 50 and sick and not emergency -> checking + } +} + +life_arc BooleanLogic { + state idle { + // Boolean literals with operators + on enabled is true and paused is false -> running + on enabled is false or error is true -> stopped + on not initialized -> initializing + } + + state running { + on enabled is false or paused is true -> idle + } + + state stopped { + on enabled is true and error is false -> idle + } + + state initializing { + on initialized -> idle + } +} diff --git a/tests/examples/name_resolution.sb b/tests/examples/name_resolution.sb new file mode 100644 index 0000000..95afc09 --- /dev/null +++ b/tests/examples/name_resolution.sb @@ -0,0 +1,76 @@ +// Test name resolution and duplicate detection + +// These are all unique names - should register successfully +character Alice { + age: 30 + name: "Alice Smith" +} + +character Bob { + age: 35 + name: "Bob Jones" +} + +template PersonTemplate { + age: 18..80 + health: 0.0..1.0 +} + +enum Status { + active, + inactive, + pending +} + +life_arc AgeProgression { + state young { + on age > 18 -> adult + } + state adult { + on age > 65 -> senior + } + state senior {} +} + +schedule DailyRoutine { + 06:00 -> 08:00: wake_up + 08:00 -> 17:00: work + 17:00 -> 22:00: evening + 22:00 -> 06:00: sleep +} + +behavior SimpleBehavior { + walk_around +} + +institution Library { + name: "City Library" + capacity: 100 +} + +relationship Friendship { + Alice + Bob + bond: 0.8 +} + +location Park { + name: "Central Park" +} + +species Human { + lifespan: 80 +} + +// All names above are unique and should be registered in the name table +// The name table can be queried by 
kind: +// - Characters: Alice, Bob +// - Templates: PersonTemplate +// - Enums: Status +// - LifeArcs: AgeProgression +// - Schedules: DailyRoutine +// - Behaviors: SimpleBehavior +// - Institutions: Library +// - Relationships: Friendship +// - Locations: Park +// - Species: Human diff --git a/tests/examples/override_values.sb b/tests/examples/override_values.sb new file mode 100644 index 0000000..2302c6f --- /dev/null +++ b/tests/examples/override_values.sb @@ -0,0 +1,89 @@ +// Test override as field values + +template HumanNeeds { + sleep: 0.8 + food: 0.7 + social: 0.5 + health: 0.6 +} + +template BakerSchedule { + work_start: 6 + work_end: 14 + lunch_time: 12 +} + +// Override in field value - set operations +character Alice { + name: "Alice" + needs: @HumanNeeds { + sleep: 0.9 + social: 0.7 + } +} + +// Override with remove operation +character Bob { + name: "Bob" + needs: @HumanNeeds { + remove social + sleep: 0.6 + } +} + +// Override with append operation +character Carol { + name: "Carol" + needs: @HumanNeeds { + append creativity: 0.8 + food: 0.9 + } +} + +// Override with mixed operations +character David { + name: "David" + needs: @HumanNeeds { + sleep: 0.95 + remove social + append exercise: 0.7 + } +} + +// Multiple overrides in same character +character Elena { + name: "Elena" + needs: @HumanNeeds { + sleep: 0.7 + food: 0.8 + } + daily_schedule: @BakerSchedule { + work_start: 5 + remove lunch_time + } +} + +// Empty override (inherits all) +character Frank { + name: "Frank" + needs: @HumanNeeds { + } +} + +// Only removes +character Grace { + name: "Grace" + needs: @HumanNeeds { + remove sleep + remove food + } +} + +// Only appends +character Henry { + name: "Henry" + needs: @HumanNeeds { + append rest: 0.5 + append work: 0.8 + } +} diff --git a/tests/examples/relationship_merging.sb b/tests/examples/relationship_merging.sb new file mode 100644 index 0000000..ee1e762 --- /dev/null +++ b/tests/examples/relationship_merging.sb @@ -0,0 +1,74 @@ +// 
Demonstration of relationship merging +// The same relationship can be declared multiple times from different perspectives +// The resolver will merge them into a single relationship + +// First, define characters +character Alice { + age: 30 + name: "Alice" +} + +character Bob { + age: 32 + name: "Bob" +} + +// Declare the relationship from Alice's perspective +// In a multi-file system, this might be in alice.sb +relationship Friendship_AliceBob { + Alice self { + // Alice's feelings about the friendship + trust: 0.9 + enjoyment: 0.95 + } other { + // How Alice perceives Bob + reliability: 0.85 + humor: 0.9 + } + Bob +} + +// Same relationship from Bob's perspective +// In a multi-file system, this might be in bob.sb +relationship Friendship_AliceBob { + Bob self { + // Bob's feelings about the friendship + trust: 0.85 + enjoyment: 0.9 + } other { + // How Bob perceives Alice + reliability: 0.95 + humor: 0.8 + } + Alice +} + +// The resolver will: +// 1. Recognize these as the same relationship (same participants + name) +// 2. Merge the self/other blocks appropriately +// 3. 
Validate that shared fields (if any) have the same values + +// Example with shared fields +relationship Professional_AliceBob { + Alice self { + respect: 0.9 + } + Bob + + // Shared field - must have same value in all declarations + workplace: "TechCorp" +} + +// Same relationship, same shared field value +relationship Professional_AliceBob { + Bob self { + respect: 0.85 + } + Alice + + // This MUST match the value in the other declaration + workplace: "TechCorp" +} + +// Note: If the shared field values differed, the resolver would +// report a validation error about conflicting values diff --git a/tests/examples/use_statements.sb b/tests/examples/use_statements.sb new file mode 100644 index 0000000..0095e0f --- /dev/null +++ b/tests/examples/use_statements.sb @@ -0,0 +1,37 @@ +// Test use statement syntax +// Note: Multi-file resolution not yet implemented, +// but syntax is parsed and validated + +// Single import - import one specific item +use characters::Martha; +use templates::GenericPerson; +use enums::BondType; + +// Grouped import - import multiple items from same module +use characters::{David, Tommy, Elena}; +use behaviors::{WorkAtBakery, SocialInteraction, DailyRoutine}; + +// Wildcard import - import everything from a module +use locations::*; +use schedules::*; + +// Nested paths work too +use world::characters::npcs::Merchant; +use schema::core::needs::Hunger; + +// After imports, define local declarations +character LocalCharacter { + age: 25 + name: "Local Person" +} + +template LocalTemplate { + age: 20..60 + energy: 0.5..1.0 +} + +enum LocalEnum { + option_a, + option_b, + option_c +} diff --git a/tests/examples/validation_errors.sb b/tests/examples/validation_errors.sb new file mode 100644 index 0000000..c54b429 --- /dev/null +++ b/tests/examples/validation_errors.sb @@ -0,0 +1,54 @@ +// Test semantic validation errors + +// Valid bond values (should parse and validate) +relationship GoodFriendship { + Alice + Bob + bond: 0.8 +} + +// Invalid 
bond value - too high (should validate with error) +// relationship BadFriendship1 { +// Carol +// David +// bond: 1.5 // Error: bond > 1.0 +// } + +// Invalid bond value - negative (should validate with error) +// relationship BadFriendship2 { +// Elena +// Frank +// bond: -0.1 // Error: bond < 0.0 +// } + +// Valid age +character YoungPerson { + age: 25 +} + +// Invalid age - negative (commented to allow file to parse) +// character InvalidPerson1 { +// age: -5 // Error: age < 0 +// } + +// Invalid age - too high (commented to allow file to parse) +// character InvalidPerson2 { +// age: 200 // Error: age > 150 +// } + +// Valid life arc with proper transitions +life_arc ValidLifeArc { + state start { + on ready -> end + } + state end { + // Terminal state + } +} + +// Invalid life arc - transition to non-existent state (commented) +// life_arc InvalidLifeArc { +// state start { +// on ready -> nonexistent // Error: state 'nonexistent' not defined +// } +// }