summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.github/workflows/test_sifter.yml30
-rw-r--r--.gitignore4
-rw-r--r--.pylintrc11
-rw-r--r--AnalogyUtils.md234
-rw-r--r--BUILD67
-rw-r--r--LICENSE661
-rw-r--r--README.md172
-rw-r--r--TSLang.md326
-rw-r--r--WORKSPACE13
-rw-r--r--analogy_utils.py230
-rw-r--r--examples/README.md11
-rw-r--r--examples/letter_analogies/BUILD30
-rw-r--r--examples/letter_analogies/README.md17
-rw-r--r--examples/letter_analogies/letter_analogy.py222
-rw-r--r--examples/letter_analogies/letter_tactics.py50
-rw-r--r--examples/letter_analogies/test_letter_analogy.py11
-rw-r--r--examples/program_analysis/BUILD62
-rw-r--r--examples/program_analysis/README.md25
-rw-r--r--examples/program_analysis/analyzelib.py244
-rw-r--r--examples/program_analysis/api_migration.py72
-rw-r--r--examples/program_analysis/lazy_structure.py305
-rw-r--r--examples/program_analysis/paper_demos/api1.after.txt14
-rw-r--r--examples/program_analysis/paper_demos/api1.before.txt14
-rw-r--r--examples/program_analysis/paper_demos/api2.after.txt14
-rw-r--r--examples/program_analysis/paper_demos/api2.before.txt14
-rw-r--r--examples/program_analysis/paper_demos/api3.after.txt0
-rw-r--r--examples/program_analysis/paper_demos/api3.before.txt14
-rw-r--r--examples/program_analysis/paper_demos/bash.txt17
-rw-r--r--examples/program_analysis/paper_demos/docs.after.txt9
-rw-r--r--examples/program_analysis/paper_demos/docs.before.txt9
-rw-r--r--examples/program_analysis/paper_demos/fish.txt13
-rw-r--r--examples/program_analysis/paper_demos/gemm1.after.txt6
-rw-r--r--examples/program_analysis/paper_demos/gemm1.before.txt6
-rw-r--r--examples/program_analysis/paper_demos/gemm2.after.txt10
-rw-r--r--examples/program_analysis/paper_demos/gemm2.before.txt10
-rw-r--r--examples/program_analysis/paper_demos/gemm3.after.txt0
-rw-r--r--examples/program_analysis/paper_demos/gemm3.before.txt6
-rw-r--r--examples/program_analysis/paper_demos/gemm4.bad.txt6
-rw-r--r--examples/program_analysis/program_understanding.py72
-rw-r--r--examples/program_analysis/transform_learning.py61
-rw-r--r--examples/program_analysis/ui/BUILD12
-rw-r--r--examples/program_analysis/ui/index.html13
-rw-r--r--examples/program_analysis/ui/index.js74
-rw-r--r--examples/program_analysis/ui/lazy_structure_parser.py68
-rw-r--r--examples/program_analysis/ui/leader-line.min.js2
-rw-r--r--examples/program_analysis/ui/plain-draggable.min.js2
-rw-r--r--examples/program_analysis/ui/serve.py58
-rw-r--r--examples/program_analysis/ui/style.css35
-rw-r--r--examples/turing_machine/BUILD19
-rw-r--r--examples/turing_machine/README.md14
-rw-r--r--examples/turing_machine/test_turing_machine.py14
-rw-r--r--examples/turing_machine/turing_machine.py156
-rw-r--r--mapper.py121
-rw-r--r--requirements.txt3
-rw-r--r--runtime/BUILD74
-rw-r--r--runtime/README.md33
-rw-r--r--runtime/assignment.py145
-rw-r--r--runtime/cpp_structure.py153
-rw-r--r--runtime/interactive.py164
-rw-r--r--runtime/matcher.py273
-rw-r--r--runtime/pattern.py114
-rw-r--r--runtime/production_rule.py165
-rw-r--r--runtime/runtime.py96
-rw-r--r--runtime/shadow_input.py89
-rw-r--r--runtime/tests/BUILD20
-rw-r--r--runtime/tests/test_cpp_structure.py53
-rw-r--r--runtime/tests/test_utils.py71
-rw-r--r--runtime/utils.py88
-rw-r--r--tactic_utils.py76
-rw-r--r--tests/BUILD20
-rw-r--r--tests/test_ts_lib.py243
-rw-r--r--tests/test_ts_utils.py91
-rw-r--r--ts_cpp/.lvimrc2
-rw-r--r--ts_cpp/setup.py19
-rw-r--r--ts_cpp/solver.cc194
-rw-r--r--ts_cpp/structure.cc68
-rw-r--r--ts_cpp/ts_lib.cc26
-rw-r--r--ts_cpp/ts_lib.h96
-rw-r--r--ts_lib.py590
-rw-r--r--ts_utils.py136
80 files changed, 6780 insertions, 2 deletions
diff --git a/.github/workflows/test_sifter.yml b/.github/workflows/test_sifter.yml
new file mode 100644
index 0000000..4b88072
--- /dev/null
+++ b/.github/workflows/test_sifter.yml
@@ -0,0 +1,30 @@
+# Adapted from: https://github.com/pubref/rules_protobuf/blob/master/.travis.yml
+name: Test Sifter
+
+on: pull_request
+
+jobs:
+ test-sifter:
+ runs-on: [ubuntu-18.04]
+ steps:
+ - uses: actions/checkout@v1
+ - name: Build and test Sifter.
+ run: |
+ V=3.7.0
+ OS=linux
+ ARCH=x86_64
+ GH_BASE="https://github.com/bazelbuild/bazel/releases/download/$V"
+ GH_ARTIFACT="bazel-$V-installer-$OS-$ARCH.sh"
+ CI_BASE="http://ci.bazel.io/job/Bazel/JAVA_VERSION=1.8,PLATFORM_NAME=$OS-$ARCH/lastSuccessfulBuild/artifact/output/ci"
+ CI_ARTIFACT="bazel--installer.sh"
+ URL="$GH_BASE/$GH_ARTIFACT"
+ echo $URL
+ wget -O install.sh $URL
+ chmod +x install.sh
+ ./install.sh --user
+ rm -f install.sh
+ git clone https://github.com/95616ARG/bazel_python.git
+ cd bazel_python
+ echo y | ./setup_python.sh 3.7.4 $HOME/.bazel_python
+ cd .. && rm -rf bazel_python
+ /home/runner/bin/bazel test //...
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..6210e63
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,4 @@
+__pycache__
+bazel-*
+htmlcov
+.coverage
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 0000000..1c6030a
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,11 @@
+[BASIC]
+
+# Good variable names which should always be accepted, separated by a comma
+good-names=ts,rt,i,j,k,ex,Run,_
+
+# Allow different function names as long as consistent within a module (see
+# README).
+# See https://docs.pylint.org/en/1.6.0/options.html#multiple-naming-styles
+function-rgx=(?:(?P<snake>[a-z_][a-z0-9_]{2,30})|(?P<camel>(_?([A-Z]|[a-z])*){2,30}))$
+
+method-rgx=(?:(?P<snake>[a-z_][a-z0-9_]{2,30})|(?P<camel>(_?([A-Z]|[a-z])*){2,30}))$
diff --git a/AnalogyUtils.md b/AnalogyUtils.md
new file mode 100644
index 0000000..1dedb58
--- /dev/null
+++ b/AnalogyUtils.md
@@ -0,0 +1,234 @@
+# Analogy-Utils
+Python driver for interacting with the Mapper rules in [mapper.py](mapper.py).
+
+### Running Example
+We will use the following running example from
+[examples/letter_analogies/letter_analogy.py](examples/letter_analogies/letter_analogy.py):
+```
+If abc becomes bcd and efg becomes fgh, then what does ijk become?
+```
+For notational simplicity, we will label the nodes corresponding to letters
+like so:
+```
+If abc becomes (b')(c')(d') and efg becomes (f')(g')(h'), then what does ijk become?
+```
+
+We will also assume facts related to the position of letters in the strings as
+well as alphabetical ordering:
+```
+(n1, a, Left), (n1, b, Right)
+(n2, b, Left), (n2, c, Right)
+...
+(s1, a, Pred), (s1, b, Succ)
+(s2, b, Pred), (s2, c, Succ)
+(s3, a, Pred), (s3, b', Succ)
+(s4, b, Pred), (s4, c', Succ)
+...
+```
+and so forth.
+
+### Broad Idea: Joint Traversal
+Our goal is to form a map identifying corresponding nodes.
+
+If we think of the triplet structure as a graph, the basic idea behind our
+approach is to do a joint, breadth-first traversal of the graph. We start by
+assigning two nodes to correspond to each other, then we extend the
+correspondance by following edges from each node iteratively and
+simultaneously.
+
+### Starting the Analogy
+We seed the analogy by telling it that two given nodes should correspond to
+each other. In fact, we tell it that two _facts_ should correspond to each
+other. In this case, a pretty safe fact to start with is that `abc` and `efg`
+are both letter strings that are "pre-transformation." In other words, we
+want to abstract the facts:
+```
+(t1, abc, TransformFrom) and (t2, efg, TransformFrom)
+```
+to form abstract nodes `^t` and `^???` with fact:
+```
+(^t, ^???, TransformFrom).
+```
+
+##### In Code
+In order to do this, we use the `Analogy.Begin` method, roughly like:
+```
+analogy = Analogy.Begin(rt, {
+ no_slip[":MA"]: t1,
+ no_slip[":A"]: abc,
+ no_slip[":MB"]: t2,
+ no_slip[":B"]: efg,
+ no_slip[":C"]: TransformFrom,
+}).
+```
+
+### Extending the Start
+If `extend=True` is passed to `Analogy.Begin` (the default) then it will
+automatically start to build out from this fact. Essentially, it will look at
+all other facts regarding `t1` and `t2` and try to lift (antiunify) them into
+abstract facts. In this case, we find that there are corresponding facts:
+```
+(t1, b'c'd', TransformTo) and (t2, f'g'h', TransformTo)
+```
+Hence in the abstract we can add the node `^?'?'?'` as well as the fact:
+```
+(^t, ^?'?'?', TransformTo).
+```
+
+##### In Code
+Again, this happens automatically in `Analogy.Begin(..., extend=True)`. At this
+point, the abstraction consists of two abstract groups, `^???` and `^?'?'?'`,
+where the latter is the post-transformation of the former. Hence the only
+correspondence we know between the two examples so far is that they both
+involve pairs of letter strings before and after the transformation. This is
+all that is claimed by our initial mapping of `t1` and `t2`, hence
+`Analogy.Begin` finishes.
+
+### Pivots: Extending With New Fact Nodes
+To extend the analogy further, we need to involve additional fact nodes. We do
+this by pivoting off of nodes already in the analogy and identifying fact nodes
+that claim similar things about nodes already mapped to each other. For
+example, we might have fact nodes `h1` and `h2` expressing that `a` is the
+start of string `abc` and `e` is the start of string `efg`:
+```
+(h1, abc, Group), (h1, a, Head)
+and
+(h2, efg, Group), (h2, e, Head).
+```
+Note that `abc` and `efg` are already mapped to each other in the analogy, and
+`h1` and `h2` both claim the same thing about `abc`/`efg` (namely, that they're
+groups). Hence, we can infer that `h1` and `h2` might correspond to each other,
+forming a new abstract node `^h` with fact:
+```
+(^h, ^???, Group).
+```
+
+##### In Code
+We perform this pivoting to a new fact node with the method
+`Analogy.ExtendMap`:
+```
+analogy.ExtendMap([Group]).
+```
+
+### Building off a Fact Node
+We've now recognized that both `abc` and `efg` are groups of letters, but `h1`
+and `h2` also claim something else: that each group has a head letter that
+starts it. Because we've mapped `h1` and `h2` to each other, we can follow this
+fact as well to infer that the heads of each group should probably correspond
+as well. In other words, we can lift `a` and `e` to abstract node `^1` and add
+fact:
+```
+(^h, ^1, Head).
+```
+
+##### In Code
+The call to `Analogy.ExtendMap` where we added `^h` in the first place will
+automatically follow all facts of this form when possible. It does this by
+calling the method `Analogy.ExtendFacts(^h)` which in turn repeatedly calls
+the `NewConcrete` rule to add nodes like `^1` and `Analogy.LiftFacts(^h)` to
+lift any other triplets like `(^h, ^1, Head)`.
+
+### Summary of Analogy-Making by Traversal
+In general, the operations described above are enough to create an analogy. We
+pivot repeatedly between:
+(i) Abstracting fact nodes that make claims about nodes already in the
+analogy. E.g., `h1` and `h2` claim `abc` and `efg` (which we know correspond)
+are groups, hence, `h1<->h2` is probably consistent with our analogy so we
+can abstract them to `^h`.
+(ii) Abstract nodes for which claims are made in those corresponding fact
+nodes. E.g., we think `h1` and `h2` correspond, and `h1` claims `a` is a head
+while `h2` claims `e` is a head of corresponding groups `abc` and `efg`. Hence,
+we might infer that in fact `a` and `e` correspond, forming some abstract node
+`^1` which is also a head of the abstract group `^???`.
+
+### Avoiding Bad Maps
+Unfortunately, this approach can run into problems. For example, after we say
+that `a` and `e` correspond, we might notice that there are fact nodes `s1` and
+`m1` with facts:
+```
+(s1, a, Pred), (s1, b, Succ)
+and
+(m1, e, Pred), (m1, f, Succ).
+```
+We could then map `s1<->m1` and follow this to map `b<->f`, which would
+work perfectly. However, there might _also_ be a fact node `m3` with facts:
+```
+(m3, e, Pred), (m3, f', Succ),
+```
+which actually maps _across groups_ `efg` and `f'g'h'`. The problem is that we
+pivot to groups based only on a single triplet, and, hence, looking at only a
+single triplet it's not clear if we should map
+```
+(s1, a, Pred)<->(m1, e, Pred)
+or
+(s1, a, Pred)<->(m3, e, Pred).
+```
+Both options look equally good when deciding whether `s1` should correspond to
+`m1` or `m3`. If we pick `s1<->m1`, we saw that everything works and we map
+`b<->f` as desired. However, if we map `s1<->m3` then we will infer that
+actually `b<->f'`, which is probably wrong. We need some way to decide
+between the two equally plausible mappings.
+
+##### Heuristic 1: Follow Unique Claims First
+The first heuristic is to avoid such scenarios when possible by following
+claims which are _unique_. In this case, the problem only came about because
+the first `a` was actually an alphabetical predecessor of two different nodes,
+the `b` in `abc` and the `b'` in `b'c'd'`. So when we follow predecessor ->
+successor, we have to make a choice of which successor we want to choose.
+
+If we instead had followed the claim that `a` is _to the left_ of some
+other letter, there would only be one choice: the `b` in `abc`. Similarly, the
+only thing to the right of the `e` is the `f` in `efg`. Hence, if we had
+followed the `Left`/`Right` relation instead of `Pred`/`Succ`, we would have
+arrived at the most reasonable option `b<->f`.
+
+This generally means following _structural_ relations first, and _semantic_
+relations only after that.
+
+In code, this usually looks like calling `analogy.ExtendMap` multiple times
+with different parameters, of decreasing level of uniqueness.
+
+##### Heuristic 2: All or Nothing
+Following `Left`/`Right` relations first gives us the desired correspondence of
+`b<->f`. However, this doesn't immediately solve the original problem of
+determining if `s1<->m1` or `s1<->m3`. Once we have decided `b<->f`, however,
+we can try both `s1<->m1` and `s1<->m3` and apply our second heuristic: take
+_only the fact nodes where all facts lift_. In this case, we could try to
+correspond `s1<->m3` but then we would find that we could _not_ make the facts
+```
+(s1, b, Succ) and (m3, f', Succ)
+```
+correspond to each other, because we do not have `b<->f'`. Thus, `s1<->m3`
+would leave facts which don't lift to the abstraction while `s1<->m1` would be
+able to lift all the relevant facts. Hence, we would prefer `s1<->m1`.
+
+In the code, this is implemented in `analogy.ExtendFacts` by calling
+`analogy.LiftFacts` to try and lift all relevant facts to the abstract and
+then `analogy.FactsMissing` to check if all facts were lifted. If some can't
+be lifted, then `ExtendFacts` will return `False` and `ExtendMap` will give
+up on that mapping.
+
+##### Heuristic 3: Voting
+In the near future we would like to take an alternate approach, which is
+somewhat closer to the original Copycat: voting. Essentially, in this case we
+have that following `s1<->m1` leads to a better analogy because then we can
+lift all facts and it also agrees with the unique mapping when we follow
+`Left`/`Right` to get `b<->f`.
+
+### Completing an Analogy
+Suppose we have already mapped `abc->bcd` and `efg->fgh` and want to start
+solving `ijk->?`. We:
+* First call `Analogy.Begin(..., exists=True)` to map `ijk` into the _existing_
+  analogy noting correspondences between `abc->bcd` and `efg->fgh`.
+* Then, we call `Analogy.ExtendMap` as before to complete the analogy between
+ `ijk->?` and `abc->bcd`/`efg->fgh`.
+* Then, we set `analogy.state="concretize"`.
+* Then, we again call `Analogy.ExtendMap`. It will continue to traverse the
+ existing analogy between `abc->bcd` and `efg->fgh`, but, because we set
+ `state="concretize"`, instead of looking for nodes already in the structure
+ that might correspond to abstract nodes, it just adds new nodes to the
+ structure and lowers the corresponding facts from the abstract to these
+ nodes.
+* Finally, we run inference rules which solve those lowered facts. E.g., we
+ might lower a fact that says that `_1` is the successor of `a`, then infer
+ that `_1` is the letter `b`.
diff --git a/BUILD b/BUILD
new file mode 100644
index 0000000..38092d9
--- /dev/null
+++ b/BUILD
@@ -0,0 +1,67 @@
+load("@bazel_python//:bazel_python.bzl", "bazel_python_coverage_report", "bazel_python_interpreter")
+
+bazel_python_interpreter(
+ name = "bazel_python_venv",
+ python_version = "3.7.4",
+ requirements_file = "requirements.txt",
+ run_after_pip = """
+ pushd ts_cpp
+ python3 setup.py install || exit 1
+ popd
+ """,
+ run_after_pip_srcs = glob(["ts_cpp/*"]),
+ visibility = ["//:__subpackages__"],
+)
+
+bazel_python_coverage_report(
+ name = "coverage_report",
+ code_paths = [
+ "*.py",
+ "runtime/*.py",
+ ],
+ test_paths = [
+ "tests/*",
+ "runtime/tests/*",
+ "examples/*/test_*",
+ ],
+)
+
+py_library(
+ name = "ts_lib",
+ srcs = ["ts_lib.py"],
+ visibility = ["//visibility:public"],
+ deps = [],
+)
+
+py_library(
+ name = "ts_utils",
+ srcs = ["ts_utils.py"],
+ visibility = ["//visibility:public"],
+ deps = [],
+)
+
+py_library(
+ name = "mapper",
+ srcs = ["mapper.py"],
+ visibility = ["//visibility:public"],
+ deps = [":ts_utils"],
+)
+
+py_library(
+ name = "tactic_utils",
+ srcs = ["tactic_utils.py"],
+ visibility = ["//visibility:public"],
+ deps = [
+ ":ts_utils",
+ "//runtime:matcher",
+ ],
+)
+
+py_library(
+ name = "analogy_utils",
+ srcs = ["analogy_utils.py"],
+ visibility = ["//visibility:public"],
+ deps = [
+ ":tactic_utils",
+ ],
+)
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..0ad25db
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,661 @@
+ GNU AFFERO GENERAL PUBLIC LICENSE
+ Version 3, 19 November 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU Affero General Public License is a free, copyleft license for
+software and other kinds of works, specifically designed to ensure
+cooperation with the community in the case of network server software.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+our General Public Licenses are intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ Developers that use our General Public Licenses protect your rights
+with two steps: (1) assert copyright on the software, and (2) offer
+you this License which gives you legal permission to copy, distribute
+and/or modify the software.
+
+ A secondary benefit of defending all users' freedom is that
+improvements made in alternate versions of the program, if they
+receive widespread use, become available for other developers to
+incorporate. Many developers of free software are heartened and
+encouraged by the resulting cooperation. However, in the case of
+software used on network servers, this result may fail to come about.
+The GNU General Public License permits making a modified version and
+letting the public access it on a server without ever releasing its
+source code to the public.
+
+ The GNU Affero General Public License is designed specifically to
+ensure that, in such cases, the modified source code becomes available
+to the community. It requires the operator of a network server to
+provide the source code of the modified version running there to the
+users of that server. Therefore, public use of a modified version, on
+a publicly accessible server, gives the public access to the source
+code of the modified version.
+
+ An older license, called the Affero General Public License and
+published by Affero, was designed to accomplish similar goals. This is
+a different license, not a version of the Affero GPL, but Affero has
+released a new version of the Affero GPL which permits relicensing under
+this license.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU Affero General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Remote Network Interaction; Use with the GNU General Public License.
+
+ Notwithstanding any other provision of this License, if you modify the
+Program, your modified version must prominently offer all users
+interacting with it remotely through a computer network (if your version
+supports such interaction) an opportunity to receive the Corresponding
+Source of your version by providing access to the Corresponding Source
+from a network server at no charge, through some standard or customary
+means of facilitating copying of software. This Corresponding Source
+shall include the Corresponding Source for any work covered by version 3
+of the GNU General Public License that is incorporated pursuant to the
+following paragraph.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the work with which it is combined will remain governed by version
+3 of the GNU General Public License.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU Affero General Public License from time to time. Such new versions
+will be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU Affero General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU Affero General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU Affero General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU Affero General Public License as published
+ by the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Affero General Public License for more details.
+
+ You should have received a copy of the GNU Affero General Public License
+ along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source. For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive
+of the code. There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU AGPL, see
+<https://www.gnu.org/licenses/>.
diff --git a/README.md b/README.md
index ce7a9b6..8dce35f 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,171 @@
-# sifter
+# Sifter
+This repository contains code implementing the prototype analogy-making program
+Sifter, as described in our 'Onward!' 2020 paper "Analogy-Making as a Core
+Primitive in the Software Engineering Toolbox."
-Code for our paper [Analogy-Making as a Core Primitive in the Software Engineering Toolbox](https://arxiv.org/abs/2009.06592) will be posted here by mid-October.
+Sifter can make _analogies_ about programs, e.g., identifying data structures
+and methods which play corresponding roles in two different programs. Sifter
+can also _complete_ analogies, which allows it to automatically learn and
+generalize source code transformations from a small number of examples.
+
+#### Dependencies
+You must install [Bazel](https://bazel.build) as well as set up
+[bazel_python](https://github.com/95616ARG/bazel_python) with Python 3.7.4.
+
+#### Running Tests, Examples
+You should then be able to run
+```bash
+bazel test //... && bazel run coverage_report
+```
+to get a coverage report in `htmlcov/index.html`.
+
+To run the examples:
+```bash
+bazel run examples/letter_analogies:letter_analogy
+bazel run examples/turing_machine:turing_machine
+bazel run examples/program_analysis:program_understanding
+bazel run examples/program_analysis:api_migration
+bazel run examples/program_analysis:transform_learning
+```
+For the `program_analysis` ones, you can view the result by visiting
+`http://localhost:8001` in your browser once prompted.
+
+#### Goals, Status, and Future Work
+This repository accompanies our paper in
+[Onward! 2020](https://2020.splashcon.org/track/splash-2020-Onward-papers?#the-character-of-onward),
+with the goal of encouraging interest and future work in automated
+analogy-making for software engineering. Ultimately, we see analogy-making as
+involving three main processes:
+1. _Raw Perception_, such as reading files (code, documentation) from the
+ filesystem and into the triplet structure workspace.
+
+ **Status:** The `LazyStructure` and `LazyTextDocument` classes in
+ [examples/program_analysis/lazy_structure.py](examples/program_analysis/lazy_structure.py)
+ read a file from the filesystem and add a representation of the file's
+ contents to the triplet structure workspace. These interfaces are 'lazy' in
+ that they allow selecting only certain parts of a file to be included in the
+ workspace. Currently we either specify ahead-of-time which portions of the
+ file to include in the workspace or just add the entire file contents.
+
+ **Future Work:** We envision the laziness being used to initially focus the
+ analogy-making on a small subset of the relevant code, then expanding to
+ larger portions as the analogy solidifies.
+2. _Semantic Rewriting_, such as grouping individual characters into a single
+ token, inlining a function, and annotating invariants found via program
+ analysis.
+
+ **Status:** Currently, the default `LazyTextDocument` interface performs a
+ light-weight tokenization pass that groups characters separated by spaces
+ and special characters (such as `+`) before encoding the document into the
+ workspace. Extra semantic information, such as program invariants, are
+ specified ahead-of-time by either directly editing the workspace or using
+ the `AnnotateFact` method of `LazyTextDocument`. There is an example of
+ annotating program invariants in
+ [examples/program_analysis/transform_learning.py](examples/program_analysis/transform_learning.py).
+
+ **Future Work:** We would like to directly connect Sifter to program
+ analysis tools so analysis results can be automatically imported into the
+ triplet structure workspace instead of needing to be manually annotated. We
+ would like to incorporate rewrite rules that express semantic equivalence,
+ such as inlining, syntax de-sugaring, and grouping. Finally, we would like
+ to develop heuristics that can determine when to apply a program analyzer or
+ semantics-preserving rewrite rule. Such a heuristic would need to operate in
+ tandem with and be guided by the abstraction/anti-unification process.
+3. _Abstraction/Anti-Unification_, where we pair up corresponding objects in
+ the workspace to form an analogy.
+
+ **Status:** We have fairly-complete rules and heuristics for this that are
+ described in more detail in [AnalogyUtils.md](AnalogyUtils.md). These rules
+ roughly implement a syntactic anti-unification on unrooted, labelled graphs.
+
+ **Future Work:** Our existing abstraction rules work well, but depend on the
+ semantic rewriting to have already exposed most of the correspondences in
+ the syntactic representation of the workspace itself. So the primary future
+ work for this process is to provide feedback to the semantic rewriting
+ engine. For example, given programs `if x: y += 1` and `z += a ? 1 : 0;` we
+ may not be able to initially find a good syntactic abstraction, but we want
+ the abstraction process to be able to give feedback to the semantic
+ rewriting process to, e.g., desugar the ternary `?:` operator into the
+ corresponding `if` statement, which we can then find a syntactic
+   correspondence with. Beyond such better heuristics, we would also like to
+ support higher-order 'slips,' i.e., analogies where the types themselves are
+ abstracted.
+
+In addition to these three processes, there are two other main directions for
+future work:
+* The runtime does not currently support modifying rules with other rules; in
+ fact, rule-related nodes are removed from the structure completely after an
+ initial rule-parsing pass. In the future we may decide to change this to
+ assist with meta-learning.
+* We are still working on a good visualization module for triplet structures.
+ There is a rudimentary CLI interface
+ ([runtime/interactive.py](runtime/interactive.py)) as well as an interface
+ specifically for source code analogies
+ ([examples/program_analysis/ui](examples/program_analysis/ui)), however we
+ plan to introduce a more general-purpose interface in the near future.
+
+#### High-level File Overview
+- [ts_lib.py](ts_lib.py): the core library, defines the triplet structure data
+ structure and some embedded-DSL-style helpers for describing triplet
+ structures.
+- [ts_utils.py](ts_utils.py): a set of macros which make working with the
+ library (especially expressing rules) easier.
+- [tactic_utils.py](tactic_utils.py): a set of macros which make writing
+ tactics ("controllers for applying the rules") easier.
+- [mapper.py](mapper.py): rules which can be added to any triplet structure to
+ enable making abstract analogies (i.e., identifying isometries in
+ sub-structures).
+- [analogy_utils.py](analogy_utils.py): a Python interface and tactics to
+ building analogies in triplet structures that have the rules from
+ [mapper.py](mapper.py) added to them.
+- [ts_cpp/](ts_cpp/): C++ implementation of the core triplet structure data
+ structure, as well as a backtracking triplet constraint solver.
+- [runtime/](runtime/): a runtime to parse and execute rules on
+ TripletStructures.
+- [examples/](examples/): examples of using triplet structures to solve
+ analogies.
+
+#### Quickstart with the Code
+Please see [TSLang.md](TSLang.md), which documents how to write code using the
+Triplet Structure language.
+
+#### Programming Style
+We see much of the code in this repository as defining a domain-specific
+language [TSLang](TSLang.md) embedded in Python. We then write, within
+`TSLang`, the core analogy-making code for Sifter.
+
+To highlight the difference between "plumbing" code implementing the `TSLang`
+language and runtime vs. the actual analogy-making code written in `TSLang`, we
+use a distinct coding convention for each type of code:
+1. Code implementing `TSLang`, specifically `ts_lib.py` and `runtime/*`, is
+ written using Google-style Python naming (e.g., `snake_case` for methods and
+ `PascalCase` for classes).
+2. Code written in `TSLang` (everything else) is written using `PascalCase` for
+ method names.
+
+This naming convention is enforced intra-file by `.pylintrc`.
+
+#### Citing
+```
+@inproceedings{sifter:onward20,
+ author = {Matthew Sotoudeh and
+ Aditya V. Thakur},
+ title = {Analogy-Making as a Core Primitive in the Software Engineering Toolbox},
+ booktitle = {Proceedings of the 2020 {ACM} {SIGPLAN} International Symposium on
+ New Ideas, New Paradigms, and Reflections on Programming and Software,
+ Onward! 2020, November 18-20, Virtual, USA},
+ publisher = {{ACM}},
+ year = {2020},
+ url = {https://doi.org/10.1145/3426428.3426918},
+ doi = {10.1145/3426428.3426918},
+}
+```
+
+#### People
+- [Matthew Sotoudeh](https://masot.net/): email
+ [masotoudeh@ucdavis.edu](mailto:masotoudeh@ucdavis.edu).
+- [Aditya Thakur](https://thakur.cs.ucdavis.edu/): email
+ [avthakur@ucdavis.edu](mailto:avthakur@ucdavis.edu).
+
+#### License
+Licensed under the [AGPLv3](LICENSE).
diff --git a/TSLang.md b/TSLang.md
new file mode 100644
index 0000000..e1c2113
--- /dev/null
+++ b/TSLang.md
@@ -0,0 +1,326 @@
+# TSLang
+The goal of this file is to document how to begin writing Sifter code. It
+is specifically directed towards understanding the high-level '`TSLang`'
+interface, which is used to write the analogy-making rules (`mapper.py`) as
+well as the application demos (`examples/...`).
+
+### High-Level View
+At the highest level, `TSLang` programs are programs that define and operate on
+a particular type of data structure, a _triplet structure._ Most `TSLang`
+programs are separated into two stages:
+1. An initial triplet structure is defined with the data of interest, then
+2. The initial structure is then iteratively modified by applying pre-defined
+ _update rules._
+
+`TSLang` provides syntactic sugar to help:
+1. Define triplet structures (`ts_lib`),
+2. Define rules operating on triplet structures (`ts_lib`, `ts_utils`), and
+3. Write _tactics_ determining which rules to apply in what order (`runtime`
+ and `tactic_utils`).
+
+We will address each of these in turn.
+
+### (0) What are Triplet Structures?
+A triplet structure is a particular type of data structure suited to
+representing knowledge about the world.
+
+At its core, a triplet structure consists of exactly two things:
+1. A list of _nodes_ and
+2. A list of _triplet facts_, which are 3-tuples of nodes.
+
+To give some meaning to the structure, we adopt the convention that a triplet
+fact `(A, B, C)` should be interpreted like so:
+1. `A` represents a particular logical fact,
+2. `B` represents an instance of something,
+3. `C` represents the _type_, or role `B` plays in the fact.
+
+For example, we might want to assert that `Homer` is the `Father` and `Marge`
+is the `Mother` of `Lisa` with the facts:
+```
+(FamilyFact1, Homer, Father),
+(FamilyFact1, Marge, Mother),
+(FamilyFact1, Lisa, Daughter)
+```
+We think of `FamilyFact1` as the unified 'thought' or 'fact node' representing
+the fact that Homer, Marge, and Lisa together make an instance of the "Family"
+type, with the given roles.
+
+For more discussion of triplet structures and how to represent logical
+information with them, please see our Onward paper.
+
+### (1) Representing Triplet Structures with `TSLang`
+A triplet structure is represented by an instance of the `TripletStructure` class.
+Most programs will only have one instance of `TripletStructure`, which we will
+conventionally name `ts`:
+```python
+from ts_lib import TripletStructure
+ts = TripletStructure()
+```
+Nodes in `TSLang` each have a unique, string name. With few exceptions, all
+names should start with `/:`. Nodes are referenced by indexing notation, and
+are automatically created upon reference if they do not yet exist:
+```python
+# Creates nodes '/:Homer' and '/:Marge'
+ts["/:Homer"], ts["/:Marge"]
+```
+To add a fact `(A, B, C)` we use the notation `ts[A].map({ts[B]: ts[C]})`, like
+so:
+```python
+# Adds fact ('/:FamilyFact1', '/:Homer', '/:Father')
+ts["/:FamilyFact1"].map({ts["/:Homer"]: ts["/:Father"]})
+```
+Multiple facts with the same fact node can be expressed naturally as well:
+```python
+# Adds 3 facts.
+ts["/:FamilyFact1"].map({
+ ts["/:Homer"]: ts["/:Father"],
+ ts["/:Marge"]: ts["/:Mother"],
+ ts["/:Lisa"]: ts["/:Daughter"],
+})
+```
+To prevent name collisions and to better enable the usage of macros to
+automatically manipulate the structure, we have support for _scopes_. A scope
+is simply a prefix of a node name, up to (but not including) a `:`. Intuitively
+we can think of node names as paths, with `:` delimiting directory boundaries,
+and scopes playing the role of directories. For example, we might want to place
+`Father`, `Mother`, and `Daughter` all in the `/:Family` scope, and `Homer`,
+`Marge`, and `Lisa` in the `Simpsons` scope:
+```python
+ts["/:FamilyFact1"].map({
+ ts["/:Simpsons:Homer"]: ts["/:Family:Father"],
+ ts["/:Simpsons:Marge"]: ts["/:Family:Mother"],
+ ts["/:Simpsons:Lisa"]: ts["/:Family:Daughter"],
+})
+```
+In fact, `TSLang` has first-class support for scopes. By default, `ts[...]`
+always indexes relative to its _current scope_, which can be changed using
+`with ts.scope(...):`. The above is, for example,
+equivalent to:
+```python
+# family[":..."] is equivalent to ts["/:Family:..."]
+family = ts.scope("/:Family")
+with ts.scope("/:Simpsons"):
+ # In this block, ts[":..."] is automatically prepended with "/:Simpsons"
+ # while ts["/:..."] is taken as an absolute path.
+ ts["/:FamilyFact1"].map({
+ ts[":Homer"]: family[":Father"],
+ ts[":Marge"]: family[":Mother"],
+ ts[":Lisa"]: family[":Daughter"],
+ })
+# Outside the with block, ts[":..."] again refers to ts["/:..."].
+```
+Scopes are generally used to group logically-related nodes together, especially
+nodes which should be treated in a particular way by some macro (as we will see
+later in this document).
+
+### (2) Writing Update Rules to Operate on Triplet Structures
+An _update rule_ is a program that searches for a pattern in the triplet
+structure and then makes some modification to the structure according to that
+pattern. With `TSLang`, we _define update rules within the structure itself._
+We accomplish this by adding the pattern to search for as part of the
+structure, and adding extra facts which annotate which parts of the pattern
+need to be searched for or inserted/removed once an assignment is found.
+
+The below example demonstrates this with a rule expressing transitivity of the
+'greater than' relation:
+```python
+with ts.scope(":TransitivityRule"):
+ # First we describe the pattern we want to search for:
+ ts[":AGreaterThanB"].map({
+ ts[":A"]: ts["/:GreaterPair:Greater"],
+ ts[":B"]: ts["/:GreaterPair:Lesser"],
+ })
+ ts[":BGreaterThanC"].map({
+ ts[":B"]: ts["/:GreaterPair:Greater"],
+ ts[":C"]: ts["/:GreaterPair:Lesser"],
+ })
+ # Then what we want to insert if that pattern is found:
+ ts[":AGreaterThanC"].map({
+ ts[":A"]: ts["/:GreaterPair:Greater"],
+ ts[":C"]: ts["/:GreaterPair:Lesser"],
+ })
+ # Finally, we encode the details of the rule:
+ ts[":RuleFact"].map({
+ # /:TransitivityRule:_ will represent the rule as a whole.
+ ts[":_"]: ts["/RULE"],
+ # The facts implying A>B, B>C should already exist in the structure
+ # before we apply this rule, hence we 'must map' them.
+ ts[":AGreaterThanB"]: ts["/MUST_MAP"],
+ ts[":BGreaterThanC"]: ts["/MUST_MAP"],
+ ts[":A"]: ts["/MUST_MAP"],
+ ts[":B"]: ts["/MUST_MAP"],
+ ts[":C"]: ts["/MUST_MAP"],
+ # If they are found, we then 'insert' the A>C node and associated
+ # facts.
+ ts[":AGreaterThanC"]: ts["/INSERT"],
+ })
+```
+Note that the nodes `/RULE`, `/MUST_MAP`, and `/INSERT` do not start with `/:`.
+This indicates they are special nodes, which will be explicitly interpreted by
+the runtime. Also note that the nodes `/:GreaterPair:Greater` and
+`/:GreaterPair:Lesser` are not mentioned in the `:RuleFact`, which means they
+will be treated as constants in the corresponding pattern.
+
+Note also that, in addition to `/MUST_MAP`, other quantifications are possible:
+`NO_MAP(#)` and `TRY_MAP`. In general, potential rule applications are
+discovered in three passes:
+1. First, we search for assignments to the constraints involving only the
+ `/MUST_MAP` nodes.
+2. For each of those assignments, we check to ensure that they can *not* be
+ extended to a satisfying assignment to the `NO_MAP1`, `NO_MAP2`, ...
+ constraints. We throw away any assignment which can be extended to also
+ satisfy the `NO_MAP` constraints.
+3. For any remaining assignments, we attempt to extend the assignment to also
+ satisfy constraints involving the `TRY_MAP` nodes, if possible (otherwise
+ the original assignment is used).
+Similarly, in addition to `/INSERT` there are also other actions possible:
+1. `/REMOVE` removes the node and _all_ associated facts.
+2. `/SUBTRACT` removes only those facts explicitly mentioned by the rule (the
+ node is removed if there are then no remaining facts).
+
+Finally, note that by default we assume all of the `/MUST_MAP` nodes must have
+_unique_ assignments. This can be explicitly weakened if desired to allow
+pattern matches to assign the same node to two different `/MUST_MAP` variables.
+
+### (3) Macros for Writing Update Rules
+The above rule-declaration syntax can become tedious and error-prone if done by
+hand. To assist in this, two macros are provided in `ts_utils.py` which
+significantly improve the experience.
+
+#### `RegisterRule`
+The first, `RegisterRule`, is the most flexible. It works by putting nodes with
+the same role in the rule (e.g., `/INSERT` or `/MUST_MAP`) in the same scope.
+The rule from the previous example could be rewritten:
+```python
+with ts.scope(":TransitivityRule"):
+ with ts.scope(":MustMap") as existing:
+ ts[":AGreaterThanB"].map({
+ ts[":A"]: ts["/:GreaterPair:Greater"],
+ ts[":B"]: ts["/:GreaterPair:Lesser"],
+ })
+ ts[":BGreaterThanC"].map({
+ ts[":B"]: ts["/:GreaterPair:Greater"],
+ ts[":C"]: ts["/:GreaterPair:Lesser"],
+ })
+ with ts.scope(":Insert"):
+ ts[":AGreaterThanC"].map({
+ existing[":A"]: ts["/:GreaterPair:Greater"],
+ existing[":C"]: ts["/:GreaterPair:Lesser"],
+ })
+ RegisterRule(ts)
+```
+Notice that we need to explicitly refer to `existing[":A"]` in the second
+scope, as they are no longer all in the same scope.
+
+#### `RegisterPrototype`
+The second useful macro, `RegisterPrototype`, is useful when both:
+1. You only need `/MUST_MAP` and `/INSERT` nodes, and
+2. You want to define multiple rules which involve the same patterns.
+
+An equivalent rule to the above is shown below:
+```python
+with ts.scope(":TransitivityRule"):
+ ts[":AGreaterThanB"].map({
+ ts[":A"]: ts["/:GreaterPair:Greater"],
+ ts[":B"]: ts["/:GreaterPair:Lesser"],
+ })
+ ts[":BGreaterThanC"].map({
+ ts[":B"]: ts["/:GreaterPair:Greater"],
+ ts[":C"]: ts["/:GreaterPair:Lesser"],
+ })
+ ts[":AGreaterThanC"].map({
+        ts[":A"]: ts["/:GreaterPair:Greater"],
+        ts[":C"]: ts["/:GreaterPair:Lesser"],
+ })
+ RegisterPrototype(ts, dict({
+ ":_": {ts["/INSERT"]: [ts[":AGreaterThanC"]]},
+ }))
+```
+In general, the second argument can define arbitrarily many rules using the
+exact same set of nodes. Each rule lists some subset of nodes which should be
+inserted (alternatively, mapped). The remaining nodes in the scope are assumed
+to be mapped (alternatively, inserted). This feature is used extensively in
+`mapper.py`, as all of the mapper rules are essentially just different
+'shadings' of the same core pattern (see our Onward paper for more details).
+
+### (2,3b) Update Rule Gotchas: Inserting Only a Triplet Fact
+While our rule-declaration syntax is usually quite natural, there is one
+particular case where care needs to be taken. Namely, when the rule needs to
+operate on individual _facts_ about _existing nodes_. For example, suppose you
+want to search for nodes `A`, `B` with a fact `(A, A, B)`, and insert a new
+fact `(B, B, A)`. You might try to do the following:
+```python
+with ts.scope(":BadRule"):
+ with ts.scope(":MustMap") as exist:
+ ts[":A"].map({ts[":A"]: ts[":B"]})
+ with ts.scope(":Insert"):
+ exist[":B"].map({exist[":B"]: exist[":A"]})
+ RegisterRule(ts)
+```
+The problem is that the `/:BadRule:Insert` scope is actually empty --- the line
+there only refers to nodes in the `/:BadRule:MustMap` scope. Thus the
+underlying rule created will only have `/MUST_MAP` nodes, not `/INSERT`, and so
+the rule is effectively a no-op. To get around this, we need to ensure that at
+least one of the nodes of each fact we want to insert actually belongs to the
+`:Insert` scope:
+```python
+with ts.scope(":BadRule"):
+ with ts.scope(":MustMap") as exist:
+ ts[":A"].map({ts[":A"]: ts[":B"]})
+ with ts.scope(":Insert"):
+ ts[":B"].map({ts[":B"]: exist[":A"]})
+ RegisterRule(ts)
+```
+But now it will create an entirely new node, effectively `B2`, and fact
+`(B2, B, A)`! To resolve this, we need to explicitly tell the system that
+`:BadRule:Insert:B` refers to the same node as `:BadRule:MustMap:B`. This can
+be done using the `AssertNodesEqual` macro in `ts_utils.py`:
+```python
+with ts.scope(":BadRule"):
+ with ts.scope(":MustMap") as exist:
+ ts[":A"].map({ts[":A"]: ts[":B"]})
+ with ts.scope(":Insert") as insert:
+ ts[":B"].map({ts[":B"]: exist[":A"]})
+ RegisterRule(ts)
+ AssertNodesEqual(ts, [exist[":B"], insert[":B"]], "/:BadRule")
+```
+In fact, if the nodes in question end in the same name (after ignoring the
+`:MustMap` or `:Insert` scopes), as `:MustMap:B` and `:Insert:B` do, then
+`RegisterRule` can automatically assert their equality using the
+`auto_assert_equal` option:
+```python
+with ts.scope(":BadRule"):
+ with ts.scope(":MustMap") as exist:
+ ts[":A"].map({ts[":A"]: ts[":B"]})
+ with ts.scope(":Insert"):
+ ts[":B"].map({ts[":B"]: exist[":A"]})
+ RegisterRule(ts, auto_assert_equal=True)
+```
+
+### (4) Applying Update Rules
+After declaring the `TripletStructure` with some initial facts and update rules, we
+will initialize a new `TSRuntime` instance. The runtime will extract the rules
+we declared earlier and provide an interface to actually modify the structure
+using those rules. We initialize a `TSRuntime` like so:
+```python
+from runtime.runtime import TSRuntime
+
+# ... initializing ts ...
+rt = TSRuntime(ts)
+```
+
+`rt` exposes a somewhat lower-level API for applying rules:
+`rt.get_rule(rule_name)` returns a representation of the desired update rule.
+Given an update rule, `rt.propose(rule)` will yield possible assignments to the
+rule of the form `(assignment, delta)`. `assignment` describes the nodes
+satisfying the rule's pattern while `delta` describes the modification to the
+structure which should occur according to the rule.
+
+### (5) Tactics for Applying Update Rules
+`tactic_utils.py` defines a number of helpful functions, particularly
+`Fixedpoint`, for repeatedly applying a rule to the structure.
+
+### (6) Recording Changes to the Structure
+To assist with backtracking search, there are a number of tools available to
+checkpoint and rollback the state of a structure, as well as record changes.
+These are described in docstrings in `ts_lib.py`.
diff --git a/WORKSPACE b/WORKSPACE
new file mode 100644
index 0000000..b48e375
--- /dev/null
+++ b/WORKSPACE
@@ -0,0 +1,13 @@
+workspace(name = "sifter")
+
+load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
+
+git_repository(
+ name = "bazel_python",
+ commit = "bf2b329302f283d2d71eabf924d3b23186cb833e",
+ remote = "https://github.com/95616ARG/bazel_python.git",
+)
+
+load("@bazel_python//:bazel_python.bzl", "bazel_python")
+
+bazel_python()
diff --git a/analogy_utils.py b/analogy_utils.py
new file mode 100644
index 0000000..4e6f58e
--- /dev/null
+++ b/analogy_utils.py
@@ -0,0 +1,230 @@
+"""Python driver for interacting with the Mapper rules.
+
+See AnalogyUtils.md for documentation.
+"""
+from collections import defaultdict
+import itertools
+from tactic_utils import RuleFixedpoint, RuleAny, GetMatcher
+
+class Analogy:
+ """Represents a single analogy between two things in the structure."""
+ def __init__(self, rt, MAlphaA, MAlphaB, state="new"):
+ """Initialize referring to an existing analogy in the structure."""
+ self.rt = rt
+ self.ts = rt.ts
+ self.no_slip = rt.ts.scope("/:Mapper:NoSlipRules", protect=True)
+ self.MAlphaA = MAlphaA
+ self.MAlphaB = MAlphaB
+ assert state in ("new", "concretize", "exists")
+ self.state = state
+
+ def ExtendMap(self, relations=None, partial=None, no_follow=False,
+ no_top=False):
+ """Extend the analogy using a new map node.
+ """
+ partial = self.Partial(partial or dict())
+
+ if relations:
+ assert self.no_slip[":C"] not in partial
+ for relation in relations:
+ partial[self.no_slip[":C"]] = relation
+ if self.ExtendMap(None, partial, no_follow):
+ return True
+ return False
+
+ if self.state == "concretize":
+ proposals = itertools.chain(
+ self.Propose(self.no_slip[":Concretize?Map:_"], partial),
+ self.Propose(self.no_slip[":ConcretizeMap:_"], partial),
+ )
+ elif self.state == "exists":
+ proposals = self.Propose(self.no_slip[":ExistingMap:_"], partial)
+ else:
+ proposals = self.Propose(self.no_slip[":NewMap:_"], partial)
+ for assignment, _ in proposals:
+ if no_top and self.IsTop(assignment[self.no_slip[":B"]]):
+ continue
+ alpha = assignment[self.no_slip[":AlphaMAB"]]
+ if not self.Function(alpha) or not self.Injective(alpha):
+ continue
+ if self.ExtendFacts(alpha, no_follow=no_follow):
+ return True
+ return False
+
+ def ExtendFacts(self, alpha_MAB, no_follow=False):
+ """Extends the analogy to include facts about a new map node.
+
+ If @no_follow, then it will not concretize any nodes, only lift/lower
+ facts.
+ """
+ partial = self.Partial({self.no_slip[":AlphaMAB"]: alpha_MAB})
+ recording = self.ts.start_recording()
+ while not no_follow:
+ if self.state == "concretize":
+ proposals = self.Propose(self.no_slip[":Concretize?Concrete:_"], partial)
+ for assignment, _ in proposals:
+ alpha = assignment[self.no_slip[":AlphaAB"]]
+ if (self.Injective(alpha) and self.Function(alpha)
+ and self.ConcretizeFacts(alpha_MAB)):
+ break
+ else:
+ break
+ elif self.state == "exists":
+ proposals = self.Propose(self.no_slip[":ExistingConcrete:_"], partial)
+ for assignment, _ in proposals:
+ alpha = assignment[self.no_slip[":AlphaAB"]]
+ if (self.Injective(alpha) and self.Function(alpha)
+ and self.FactsLower(alpha_MAB)):
+ break
+ else:
+ break
+ else:
+ proposals = self.Propose(self.no_slip[":NewConcrete:_"], partial)
+ for assignment, _ in proposals:
+ alpha = assignment[self.no_slip[":AlphaAB"]]
+ if self.Function(alpha) and self.LiftFacts(alpha_MAB):
+ break
+ else:
+ break
+ if self.state == "concretize":
+ self.ConcretizeFacts(alpha_MAB)
+ else:
+ RuleFixedpoint(self.rt, self.no_slip[":NewFact:_"], partial)
+ if self.FactsMissing(alpha_MAB) or not self.FactsLower(alpha_MAB):
+ recording.rollback()
+ return False
+ return True
+
+ def LiftFacts(self, alpha_M):
+ """For every fact (m, ?, ?) try to add abstract fact (alpha_M, ?, ?).
+
+ Only lifts a fact if all nodes are already abstracted. If all facts are
+ lifted, returns True. If there are any facts which cannot be lifted, it
+ undoes its changes and returns False to indicate that these two nodes
+ probably shouldn't correspond to each other.
+ """
+ recording = self.ts.start_recording()
+ RuleFixedpoint(self.rt, self.no_slip[":NewFact:_"], self.Partial({
+ self.no_slip[":AlphaMAB"]: alpha_M,
+ }))
+ if self.FactsMissing(alpha_M):
+ recording.rollback()
+ return False
+ return True
+
+ def ConcretizeFacts(self, alpha_M):
+ """Adds concrete facts corresponding to each (@alpha_M, ?, ?) fact.
+ """
+ RuleFixedpoint(self.rt, self.no_slip[":Concretize?Fact:_"],
+ self.Partial({
+ self.no_slip[":AlphaMAB"]: alpha_M,
+ }))
+ return True
+
+ def FactsMissing(self, alpha_m):
+ """True iff there is some fact (m, ?, ?) not lifting to the abstract.
+
+ Assumes m is abstracted to alpha_m.
+
+    Used as a heuristic; we only lift 'm' to 'alpha_M' if we can lift all
+ of the relevant facts.
+ """
+ missing = self.ts.scope("/:Mapper:MissingFacts:MustMap", protect=True)
+ return RuleAny(self.rt, "/:Mapper:MissingFacts:_", dict({
+ missing[":AlphaMAB"]: alpha_m,
+ }))
+
+ def FactsLower(self, alpha_m):
+ """True iff all facts (@alpha_m, ?, ?) also hold in the concrete.
+ """
+ missing = self.ts.scope("/:Mapper:UnlowerableFacts:MustMap", protect=True)
+ return not RuleAny(self.rt, "/:Mapper:UnlowerableFacts:_", dict({
+ missing[":AlphaMAB"]: alpha_m,
+ }))
+
+ def Function(self, alpha):
+ """True iff nothing mapped to @alpha is also mapped to something else.
+ """
+ not_fn = self.ts.scope("/:Mapper:NotFunction:MustMap", protect=True)
+ return not RuleAny(self.rt, "/:Mapper:NotFunction:_", dict({
+ not_fn[":AlphaA1"]: alpha,
+ }))
+
+ def Injective(self, alpha):
+ """True iff at most one concrete node is mapped to @alpha.
+ """
+ not_inj = self.ts.scope("/:Mapper:NotInjective:MustMap", protect=True)
+ return not RuleAny(self.rt, "/:Mapper:NotInjective:_", dict({
+ not_inj[":AlphaA"]: alpha,
+ }))
+
+ def IsTop(self, node):
+ """True iff some fact node claims @node is TOP."""
+ return self.ts.lookup(None, node, "/:Mapper:TOP", read_direct=True)
+
+ @classmethod
+ def Begin(cls, rt, partial, exists=False, extend_here=True):
+ """Begin an analogy.
+
+ * Takes the first new analogy matching @partial.
+ * If @exists, it assumes there is an existing analogy between two
+ things and we want to include a third object in the comparison.
+ * If @extend_here, it calls ExtendFacts on the map matched when
+ starting the analogy.
+ """
+ no_slip = rt.ts.scope("/:Mapper:NoSlipRules", protect=True)
+ proposals = rt.propose(no_slip[":HotBegin:_"], partial)
+ if exists:
+ proposals = rt.propose(no_slip[":IntoExisting:_"], partial)
+ state = "exists" if exists else "new"
+ for assignment, _ in proposals:
+ analogy = cls(rt, assignment[no_slip[":MAlphaA"]],
+ assignment[no_slip[":MAlphaB"]], state=state)
+ if extend_here:
+ analogy.ExtendFacts(assignment[no_slip[":AlphaMAB"]])
+ return analogy
+
+ def Partial(self, start):
+ """Helper to extend @start to specialize to this specific analogy."""
+ start = start.copy()
+ start[self.no_slip[":MAlphaA"]] = self.MAlphaA
+ start[self.no_slip[":MAlphaB"]] = self.MAlphaB
+ return start
+
+ def Propose(self, rule, partial):
+ """Helper to get assignments to a rule in the structure."""
+ matcher = GetMatcher(self.rt, rule, partial)
+ matcher.sync()
+ return self.rt.matcher_propose(matcher)
+
+ def Get(self):
+ """Parse the analogy into a Python data structure.
+
+ Returns three dicts, from_A, from_B, and from_abstract. from_A and
+ from_B map concrete nodes to lists of abstract nodes. from_abstract maps
+ abstract nodes to pairs (A, B) of lists of nodes in the A and B side.
+
+ In both cases, the lists are empty or singletons unless the analogy is
+ non-injective or not-a-function.
+ """
+ from_A, from_B = defaultdict(list), defaultdict(list)
+ from_abstract = dict()
+
+ facts_a = self.ts.lookup(self.MAlphaA, None, None)
+ facts_b = self.ts.lookup(self.MAlphaB, None, None)
+ abstract_nodes = set(fact[2] for fact in facts_a + facts_b)
+ for abstract in sorted(abstract_nodes):
+ concr_as = [fact[1] for fact in facts_a if fact[2] == abstract]
+ concr_bs = [fact[1] for fact in facts_b if fact[2] == abstract]
+ from_abstract[abstract] = (concr_as, concr_bs)
+ for concr_a in concr_as:
+ from_A[concr_a].append(abstract)
+ for concr_b in concr_bs:
+ from_B[concr_b].append(abstract)
+ return from_A, from_B, from_abstract
+
+ def Print(self):
+ """Print a human-readable form of the analogy."""
+ _, _, from_abstract = self.Get()
+ for _, (concr_a, concr_b) in from_abstract.items():
+ print(concr_a, "<==>", concr_b)
diff --git a/examples/README.md b/examples/README.md
new file mode 100644
index 0000000..5aea697
--- /dev/null
+++ b/examples/README.md
@@ -0,0 +1,11 @@
+# Sifter and TSLang Examples
+This directory contains a number of examples demonstrating the use of TSLang in
+general and Sifter in particular.
+
+- `turing_machine/` demonstrates the use of `TSLang` rules to simulate a simple
+ Turing machine.
+- `letter_analogies/` shows how to use `mapper` to solve a simple letter
+  analogy of the form `abc->bcd`, `lmn->mno`, `def->?`.
+- `program_analysis/` shows how to use `mapper` to solve a number of different
+ analogy problems involving source code. In particular, it contains the demos
+ discussed in our Onward! 2020 paper.
diff --git a/examples/letter_analogies/BUILD b/examples/letter_analogies/BUILD
new file mode 100644
index 0000000..4cb5b36
--- /dev/null
+++ b/examples/letter_analogies/BUILD
@@ -0,0 +1,30 @@
+py_binary(
+ name = "letter_analogy",
+ srcs = ["letter_analogy.py"],
+ deps = [
+ ":letter_tactics",
+ "//:mapper",
+ "//:ts_lib",
+ "//:ts_utils",
+ "//runtime",
+ ],
+)
+
+py_binary(
+ name = "letter_tactics",
+ srcs = ["letter_tactics.py"],
+ deps = [
+ "//:analogy_utils",
+ "//:tactic_utils",
+ ],
+)
+
+py_test(
+ name = "test_letter_analogy",
+ size = "small",
+ srcs = ["test_letter_analogy.py"],
+ deps = [
+ ":letter_analogy",
+ "@bazel_python//:pytest_helper",
+ ],
+)
diff --git a/examples/letter_analogies/README.md b/examples/letter_analogies/README.md
new file mode 100644
index 0000000..71f275b
--- /dev/null
+++ b/examples/letter_analogies/README.md
@@ -0,0 +1,17 @@
+# Letter Analogy Example
+This directory contains code for solving simple letter analogies with Sifter.
+You can run the example from the root of the Sifter repository like so:
+```bash
+bazel run examples/letter_analogies:letter_analogy
+```
+It will print progress in solving the analogy, then print the solution `efg`.
+
+#### Files
+* `letter_analogy.py` contains code defining the letter-analogy problem as well
+ as update rules which identify, e.g., nodes that are the head of a letter
+ group.
+* `letter_tactics.py` contains semi-general-purpose tactics for solving such
+ letter analogies. It currently relies on a number of hand-picked heuristics
+ to find the analogy quickly.
+* `test_letter_analogy.py` is a Pytest test which ensures that
+ `letter_analogy.py` returns the correct result.
diff --git a/examples/letter_analogies/letter_analogy.py b/examples/letter_analogies/letter_analogy.py
new file mode 100644
index 0000000..77826ea
--- /dev/null
+++ b/examples/letter_analogies/letter_analogy.py
@@ -0,0 +1,222 @@
+"""An example TripletStructure for solving a letter analogy problem.
+
+Throughout this codebase we use the shorthand `ts` to represent the
+TripletStructure being operated on, and `rt` to represent the Runtime operating on
+that TripletStructure. I have also standardized to upper-case function names to
+differentiate the "front-end" code using ts_lib from the "plumbing" code that
+actually implements the DSL. These decisions can be undone if no one else likes
+them.
+"""
+import string # pylint: disable=deprecated-module
+from timeit import default_timer as timer
+from ts_lib import TripletStructure
+from ts_utils import RegisterPrototype, RegisterRule
+from runtime.runtime import TSRuntime
+from mapper import MapperCodelet
+from letter_tactics import SolveLetterAnalogy
+
+def Main(verbose=True):
+ """Initialize the structure and solve the letter analogy.
+ """
+ # This will hold our facts and nodes (including the rules and tactics!).
+ ts = TripletStructure()
+ # Add rules describing common letter relations, eg. 'Successor.'
+ LetterRelations(ts)
+ # Add (standardized) rules describing how to map/make analogies/join
+ # sub-structures. This can handle eg. abc -> bcd and lmn -> mno being
+ # 'abstracted' to x_1x_2x_3 -> y_1y_2y_3 with Succ(x_1, y_1), etc.
+ MapperCodelet(ts)
+
+ # Add three letter analogies, describing the problem to be solved. None
+ # indicates that the 'solution' should go there (it's indicated by marking
+ # the corresponding node with TOP).
+ LetterAnalogy(ts, ":Analogy_abc_bcd", "abc", "bcd")
+ LetterAnalogy(ts, ":Analogy_lmn_mno", "lmn", "mno")
+ LetterAnalogy(ts, ":Analogy_def_top", "def", None)
+
+ # Now that we have a structure (ts) which fully describes our problems,
+ # rules, and heuristics, we can initialize a TSRuntime to modify the
+ # structure according to the rules.
+ rt = TSRuntime(ts)
+
+ SolveLetterAnalogy(rt, verbose=verbose)
+
+ solution = ExtractLetterGroup(ts, ts["/:Analogy_def_top:To:_"])
+
+ if verbose:
+ print(f"Proposed solution: {solution}")
+
+ return solution
+
+def ExtractLetterGroup(ts, letter_group):
+ """Tries to extract a string representation of the letter group.
+
+ This is not always successful, as the representation might be invalid (eg.
+ cycles, or more than one NextPair:Right). In such cases it returns None.
+ """
+ try:
+ # NOTE: These may be ambiguous, to be sure we should have a few
+ # assert(len(...) == 1)s here.
+ letter_group = letter_group.full_name
+ head_map = ts.lookup(None, letter_group, "/:HeadPair:Container")[0][0]
+ head = ts.lookup(head_map, None, "/:HeadPair:Head")[0][1]
+ letters = ""
+ visited = set()
+ while True:
+ if head in visited:
+ return None
+ visited.add(head)
+
+ for letter in string.ascii_lowercase + string.ascii_uppercase:
+ is_letter = ts.lookup(None, head, Letter(ts, letter).full_name)
+ if is_letter:
+ letters += letter
+ break
+ else:
+ letters += "?"
+ next_map = ts.lookup(None, head, "/:NextPair:Left")
+ if not next_map:
+ break
+ next_map = next_map[0][0]
+ head = ts.lookup(next_map, None, "/:NextPair:Right")[0][1]
+ return letters
+ except IndexError:
+ return None
+
+def LetterAnalogy(ts, name, analogy_from, analogy_to):
+ """Adds nodes describing a particular letter analogy.
+
+ @name should be a unique node prefix, while @analogy_from and @analogy_to
+ should be strings describing the word analogy.
+ """
+ with ts.scope(name):
+ LetterGroup(ts, analogy_from, ts.scope(":From"))
+ LetterGroup(ts, analogy_to, ts.scope(":To"))
+ ts[":AnalogyMap"].map({
+ ts[":From:_"]: ts["/:Analogy:From"],
+ ts[":To:_"]: ts["/:Analogy:To"]
+ })
+
+def LetterGroup(ts, letters, scope):
+ """Adds nodes describing a string of letters (eg. one side of an analogy).
+
+ The entire group is represented by a node scope[:_] which is marked as the
+ owner of all other nodes.
+ """
+ with scope:
+ if letters is None:
+ ts[":IsTopMap:??"].map({ts[":_"]: ts["/:Mapper:TOP"]})
+ return
+ nodes = []
+ for i, letter in enumerate(letters):
+ node = ts[":Letter{}_{}".format(i, letter)]
+ nodes.append(node)
+ ts[":IsLetterMap:??"].map({node: Letter(ts, letter)})
+ ts[":IsOwned:??"].map({
+ scope[":_"]: ts["/:Owner"],
+ node: ts["/:Owned"],
+ })
+ for left_node, right_node in zip(nodes[:-1], nodes[1:]):
+ ts[":LRMap:??"].map({
+ left_node: ts["/:NextPair:Left"],
+ right_node: ts["/:NextPair:Right"],
+ })
+
+def Letter(ts, letter):
+ """Returns a reference to the node corresponding to character @letter.
+
+ This is sort of the "Platonic conception" of @letter.
+ """
+ return ts["/:Letter:{}".format(letter)]
+
+def LetterRelations(ts):
+ """Adds relations and rules describing strings of letters.
+ """
+ # Declaring these is not strictly necessary, as they will be created when
+ # first referenced, but I am listing them here for reference.
+ # pylint: disable=pointless-statement
+ with ts.scope(":NextPair"):
+ ts[":Left, :Right"]
+ with ts.scope(":SuccessorPair"):
+ ts[":Predecessor, :Successor"]
+ with ts.scope(":UpperPair"):
+ ts[":Lower, :Upper"]
+ with ts.scope(":HeadPair"):
+ ts[":Container, :Head"]
+
+ Headify(ts)
+
+ for predecessor, successor in zip(string.ascii_lowercase[:-1],
+ string.ascii_lowercase[1:]):
+ SuccessorPrototype(ts, predecessor, successor)
+ SuccessorPrototype(ts, predecessor.upper(), successor.upper())
+
+ for letter in string.ascii_lowercase:
+ UpperPrototype(ts, letter, letter.upper())
+
+def SuccessorPrototype(ts, predecessor, successor):
+ """Rules that identify and concretize successor pairs.
+
+ Note that this approach will create 3 rules for each pair (a, b); one that
+ creates Successor(a, b) from a and b, one that creates `a` given
+ Successor(a, b) and b, and one that creates `b` given Successor(a, b) and
+ `a`.
+
+ An alternative approach is to define three "Generic Binary Prototype"
+ rules, and then expose these as "examples" that that rule then maps
+ against. I think this is a more straight-forward approach for now, though
+ in the future (or if there's too much overhead with parsing the rules) we
+ can look at the other option.
+ """
+ with ts.scope(":Successor{}To{}".format(predecessor, successor)):
+ ts[":MA"].map({ts[":A"]: Letter(ts, predecessor)})
+ ts[":MB"].map({ts[":B"]: Letter(ts, successor)})
+ ts[":PairMap"].map({ts[":A"]: ts["/:SuccessorPair:Predecessor"]})
+ ts[":PairMap"].map({ts[":B"]: ts["/:SuccessorPair:Successor"]})
+
+ RegisterPrototype(ts, dict({
+ ":ConcretizePredecessor": {ts["/INSERT"]: [ts[":MA"]]},
+ ":ConcretizeSuccessor": {ts["/INSERT"]: [ts[":MB"]]},
+ ":ConcretizePair": {ts["/INSERT"]: [ts[":PairMap"]]},
+ }), equal=[])
+
+def UpperPrototype(ts, lowercase, uppercase):
+ """Rules for identifying and concretizing Upper pairs.
+
+ See SuccessorPrototype for notes on this implementation.
+ """
+ with ts.scope(":Upper{}To{}".format(lowercase, uppercase)):
+ ts[":MA"].map({ts[":A"]: Letter(ts, lowercase)})
+ ts[":MB"].map({ts[":B"]: Letter(ts, uppercase)})
+ ts[":PairMap"].map({ts[":A"]: ts["/:UpperPair:Lower"]})
+ ts[":PairMap"].map({ts[":B"]: ts["/:UpperPair:Upper"]})
+
+ RegisterPrototype(ts, dict({
+ ":ConcretizePredecessor": {ts["/INSERT"]: [ts[":MA"]]},
+ ":ConcretizeSuccessor": {ts["/INSERT"]: [ts[":MB"]]},
+ ":ConcretizePair": {ts["/INSERT"]: [ts[":PairMap"]]},
+ }), equal=[])
+
+def Headify(ts):
+ """Rules that identify the head letter of a letter group.
+ """
+ with ts.scope("/:HeadOfContainer"):
+ with ts.scope(":MustMap") as exist:
+ ts[":IsLeftOf"].map({ts[":LeftMost"]: ts["/:NextPair:Left"]})
+ ts[":IsMember"].map({
+ ts[":Container"]: ts["/:Owner"],
+ ts[":LeftMost"]: ts["/:Owned"],
+ })
+ with ts.scope(":NoMap"):
+ ts[":IsRightOf"].map({exist[":LeftMost"]: ts["/:NextPair:Right"]})
+ with ts.scope(":TryMap:Insert"):
+ ts[":IsHead"].map({
+ exist[":Container"]: ts["/:HeadPair:Container"],
+ exist[":LeftMost"]: ts["/:HeadPair:Head"],
+ })
+ RegisterRule(ts)
+
+if __name__ == "__main__":
+ start = timer()
+ Main()
+ print(timer() - start)
diff --git a/examples/letter_analogies/letter_tactics.py b/examples/letter_analogies/letter_tactics.py
new file mode 100644
index 0000000..0709ab5
--- /dev/null
+++ b/examples/letter_analogies/letter_tactics.py
@@ -0,0 +1,50 @@
+"""Heuristics for quickly solving letter analogy problems.
+
+Matthew considers this to be written "in the DSL," although it's somewhat on
+the border.
+"""
+from tactic_utils import ApplyRulesMatching, Fix, RuleFixedpoint
+from analogy_utils import Analogy
+
+def SolveLetterAnalogy(rt, verbose=True):
+ """Applies tactics for quickly solving letter analogies.
+
+ We have two sets of tactics. The first starts a mapping between two
+ sub-structures (the examples), and the second then maps the prompt against
+ that "abstraction."
+ """
+ maybe_print = lambda string: print(string) if verbose else None
+ maybe_print("Labeling heads of letter groups...")
+ RuleFixedpoint(rt, "/:HeadOfContainer:_")
+ maybe_print("Identifying successor pairs...")
+ ApplyRulesMatching(rt, "ConcretizePair")
+
+ maybe_print("Mapping the examples...")
+ no_slip = rt.ts.scope("/:Mapper:NoSlipRules", protect=True)
+ partial = dict({
+ no_slip[":C"]: "/:Analogy:From",
+ no_slip[":A"]: "/:Analogy_abc_bcd:From:_",
+ no_slip[":B"]: "/:Analogy_lmn_mno:From:_",
+ })
+ analogy = Analogy.Begin(rt, partial)
+ ExtendAnalogyTactic(analogy)
+
+ maybe_print("Mapping the prompt...")
+ partial[no_slip[":B"]] = "/:Analogy_def_top:From:_"
+ analogy = Analogy.Begin(rt, partial, exists=True)
+ ExtendAnalogyTactic(analogy)
+
+ maybe_print("Solving for letters...")
+ analogy.state = "concretize"
+ ExtendAnalogyTactic(analogy)
+ ApplyRulesMatching(rt, "ConcretizePredecessor")
+ ApplyRulesMatching(rt, "ConcretizeSuccessor")
+
+def ExtendAnalogyTactic(analogy):
+ """Heuristic for exploring a letter string.
+
+ Basically, we first look at the head and then move rightward.
+ """
+ Fix(analogy.ExtendMap, ["/:HeadPair:Container", "/:NextPair:Left"])
+ Fix(analogy.ExtendMap, ["/:SuccessorPair:Predecessor",
+ "/:SuccessorPair:Successor", "/:Owned"])
diff --git a/examples/letter_analogies/test_letter_analogy.py b/examples/letter_analogies/test_letter_analogy.py
new file mode 100644
index 0000000..27d51d4
--- /dev/null
+++ b/examples/letter_analogies/test_letter_analogy.py
@@ -0,0 +1,11 @@
+"""Integration test using letter_analogy.py"""
+# pylint: disable=pointless-statement,import-error
+from external.bazel_python.pytest_helper import main
+import letter_analogy
+
+def test_letter_analogy():
+ """Regression test for the letter analogy example."""
+ solution = letter_analogy.Main(verbose=False)
+ assert solution == "efg"
+
+main(__name__, __file__)
diff --git a/examples/program_analysis/BUILD b/examples/program_analysis/BUILD
new file mode 100644
index 0000000..6843d35
--- /dev/null
+++ b/examples/program_analysis/BUILD
@@ -0,0 +1,62 @@
+py_binary(
+ name = "api_migration",
+ srcs = ["api_migration.py"],
+ deps = [
+ ":analyzelib",
+ ":lazy_structure",
+ "//:tactic_utils",
+ "//examples/program_analysis/ui:serve",
+ "//runtime",
+ ],
+)
+
+py_binary(
+ name = "transform_learning",
+ srcs = ["transform_learning.py"],
+ deps = [
+ ":analyzelib",
+ ":lazy_structure",
+ "//:tactic_utils",
+ "//examples/program_analysis/ui:serve",
+ "//runtime",
+ ],
+)
+
+py_binary(
+ name = "program_understanding",
+ srcs = ["program_understanding.py"],
+ deps = [
+ ":analyzelib",
+ ":lazy_structure",
+ "//:tactic_utils",
+ "//examples/program_analysis/ui:serve",
+ "//runtime",
+ ],
+)
+
+py_library(
+ name = "lazy_structure",
+ srcs = ["lazy_structure.py"],
+ deps = [
+ "//:mapper",
+ "//:tactic_utils",
+ "//:ts_lib",
+ "//:ts_utils",
+ "//examples/program_analysis/ui:serve",
+ "//runtime",
+ ],
+)
+
+py_library(
+ name = "analyzelib",
+ srcs = ["analyzelib.py"],
+ deps = [
+ "//:analogy_utils",
+ "//:mapper",
+ "//:tactic_utils",
+ "//:ts_lib",
+ "//:ts_utils",
+ "//examples/program_analysis/ui:serve",
+ "//runtime",
+ ],
+)
diff --git a/examples/program_analysis/README.md b/examples/program_analysis/README.md
new file mode 100644
index 0000000..ab99d4b
--- /dev/null
+++ b/examples/program_analysis/README.md
@@ -0,0 +1,25 @@
+# Program Analysis Example
+This directory contains code for the Sifter demos presented in our Onward!
+2020 paper. You can run the examples from the root of the Sifter repository
+like so:
+
+```bash
+bazel run examples/program_analysis:program_understanding
+bazel run examples/program_analysis:transform_learning
+bazel run examples/program_analysis:api_migration
+```
+Each example will run, then prompt you to visit `http://localhost:8001` in a
+web browser which will show the result.
+
+#### Files
+* `program_understanding.py`: Section 3.1 demonstration of comparative program
+ understanding.
+* `transform_learning.py`: Section 3.2 demonstration of learning to generalize
+ a program optimization.
+* `api_migration.py`: Section 3.3 demonstration of learning to generalize
+ API migration examples.
+* `lazy_structure.py`: Classes to interface between source code files and
+ triplet structures.
+* `analyzelib.py`: Helper methods and tactics for the demos.
+* `ui/`: Interactive UI for displaying the result of the analogy-making
+ demonstrations.
diff --git a/examples/program_analysis/analyzelib.py b/examples/program_analysis/analyzelib.py
new file mode 100644
index 0000000..b7177f3
--- /dev/null
+++ b/examples/program_analysis/analyzelib.py
@@ -0,0 +1,244 @@
+"""Helper methods for analyzing program source code."""
+from collections import defaultdict
+from tactic_utils import ApplyRulesMatching, SearchRules, GetMatcher, Fix
+from mapper import MapperCodelet
+from lazy_structure import LazyTextDocument, SPECIAL_CHARACTERS
+from ts_utils import RegisterRule, AssertNodesEqual
+from analogy_utils import Analogy
+
+def LoadDocument(path, extra_chunks=None, extra_special=None):
+ """Load a document from the file system into a LazyTextDocument.
+
+ @extra_chunks: list of ranges of characters in the document that should be
+ chunked together.
+ @extra_special: list of special tokens that should be chunked together.
+ """
+ extra_special = extra_special or []
+ with open(path, "r") as in_file:
+ text = in_file.readlines()
+ chunks = []
+ for start, length in (extra_chunks or []):
+ chunks.append("".join(text[start:(start + length)]).lstrip(" "))
+ text = "".join(text)
+ return LazyTextDocument(text, chunks + extra_special + SPECIAL_CHARACTERS)
+
+def CompleteAnalogyTactic(structure, sources):
+ """Heuristic for completing an analogy involving code transformations.
+
+ @sources should be a list of before/after LazyTextDocuments.
+ @sources[0][0] and @sources[1][0] should be the before-code of
+ @sources[0][1] and @sources[1][1] respectively.
+ @sources[2][0] should be the prompt before code, and @sources[2][1] should
+ be a currently-empty LazyTextDocument to be generated by Sifter.
+ """
+ structure.ts.commit()
+ print("Identifying word pairs...")
+ ApplyRulesMatching(structure.rt, "SameWord", dict())
+
+ print("Mapping the examples...")
+ analogy = Analogy.Begin(structure.rt, dict({
+ "/:Mapper:NoSlipRules:A":
+ structure.NodeOfChunk(sources[0][0], sources[0][0].chunks[0]),
+ "/:Mapper:NoSlipRules:B":
+ structure.NodeOfChunk(sources[1][0], sources[1][0].chunks[0]),
+ "/:Mapper:NoSlipRules:C": "/:Chunk",
+ }), extend_here=["/:Document"])
+ ExtendAnalogyTactic(structure, analogy)
+
+ print("Mapping the prompt...")
+ analogy = Analogy.Begin(structure.rt, dict({
+ "/:Mapper:NoSlipRules:A":
+ structure.NodeOfChunk(sources[0][0], sources[0][0].chunks[0]),
+ "/:Mapper:NoSlipRules:B":
+ structure.NodeOfChunk(sources[2][0], sources[2][0].chunks[0]),
+ "/:Mapper:NoSlipRules:C": "/:Chunk",
+ }), exists=True, extend_here=["/:Document"])
+ ExtendAnalogyTactic(structure, analogy)
+
+ print("Solving the prompt...")
+ analogy.state = "concretize"
+ Fix(analogy.ExtendMap, ["/:Document"])
+ Fix(analogy.ExtendMap)
+ Fix(analogy.ExtendMap, no_follow=True)
+ Fix(ApplyRulesMatching, structure.rt, "SolveWord", dict())
+
+def ExtendAnalogyTactic(structure, analogy):
+ """Tactic to extend an analogy involving source code.
+
+ See AnalogyUtils.md for the basic idea. This method describes a certain
+ ordering of nodes in the breadth-first-search. We first connect the
+ before-code to its documentation and after-code. Then we follow tokens that
+ are adjacent to each other. Finally, we look at any facts about words being
+ the same as each other. CrossDoc is used to identify associated chunks
+ across documents.
+ """
+ Fix(analogy.ExtendMap, ["/:TransformPair:Before", "/:Documentation:Code"])
+ Fix(analogy.ExtendMap, ["/:Follower:Before", "/:Follower:After"])
+ CrossDoc(structure, analogy)
+ Fix(analogy.ExtendMap, ["/:Follower:Before", "/:Follower:After"])
+ CrossDoc(structure, analogy)
+ Fix(analogy.ExtendMap, ["/:Follower:Before", "/:Follower:After"])
+ Fix(analogy.ExtendMap, ["/:Chunk", "/:SameWord"], no_follow=True)
+ # If we have no_top=False, then it will map Documents:...:_IsMember before
+ # we start concretizing. But then the concretization pass will completely
+ # ignore _IsMember, so it will never concretize the contents. Basically, we
+ # just want to deal with the TOP nodes later, when concretizing.
+ Fix(analogy.ExtendMap, no_follow=True, no_top=True)
+
+FOLLOW_RELATIONS = [
+ "/:Follower:Before", "/:Follower:After", "/:Document",
+ "/:TransformPair:After",
+]
+
+def CrossDoc(structure, analogy):
+ """Tactic to bootstrap an analogy on a new document.
+
+ Basically, the idea here is if you've already marked two tokens as
+ corresponding between Documents 0 and 2, we may want to use that
+ information to mark two different tokens as corresponding in Document 1 and
+ 3. We would usually want to do this by following facts like 'SameWord.' But
+ these facts may not be unique (there could be 100 instances of the same
+ token), so CrossDoc looks to only follow the facts that are unique. See
+ AnalogyUtils.md for a more complete description.
+ """
+ ts, rt = structure.ts, structure.rt
+ scope = ts.scope("/:Rules:TagCrossDoc:MustMap", protect=True)
+ no_slip = rt.ts.scope("/:Mapper:NoSlipRules", protect=True)
+
+ rule = SearchRules(rt, "Tag")[0]
+ new_partial = dict({
+ "/:Rules:TagCrossDoc:MustMap:MAlphaA": analogy.MAlphaA,
+ "/:Rules:TagCrossDoc:MustMap:MAlphaB": analogy.MAlphaB,
+ })
+ matcher = GetMatcher(rt, rule, new_partial)
+ matcher.sync()
+ found = defaultdict(set)
+ for assign in matcher.assignments():
+ assigned = assign.assignment
+ key = (assigned[scope[":ChunkA"]], assigned[scope[":ChunkMapA"]], assigned[scope[":DocA'"]])
+ found[key].add(assigned[scope[":ChunkMapB"]])
+ for assign in matcher.assignments():
+ assigned = assign.assignment
+ key = (assigned[scope[":ChunkA"]], assigned[scope[":ChunkMapA"]], assigned[scope[":DocA'"]])
+ if len(found[key]) != 1:
+ continue
+ analogy.ExtendMap(partial=dict({
+ no_slip[":A"]: assigned[scope[":ChunkA"]],
+ no_slip[":B"]: assigned[scope[":ChunkB"]],
+ no_slip[":MA"]: assigned[scope[":ChunkMapA"]],
+ no_slip[":MB"]: assigned[scope[":ChunkMapB"]],
+ no_slip[":C"]: assigned[scope[":ChunkType"]],
+ }))
+
+def AnalyzeCodelets(ts):
+ """Adds helper codelets to identify patterns used in the tactics above."""
+ mapper = MapperCodelet(ts)
+ ts.add_node("/:TransformPair:Before")
+ ts.add_node("/:TransformPair:After")
+ ts.add_node("/:Documentation:Code")
+ ts.add_node("/:Documentation:Docs")
+ with ts.scope("/:Rules:SameWord"):
+ with ts.scope(":MustMap") as exists:
+ ts[":MA"].map({ts[":A"]: ts[":Word"]})
+ ts[":MB"].map({ts[":B"]: ts[":Word"]})
+ ts[":IsWord"].map({ts[":Word"]: ts["/:Word"]})
+ ts[":AMember"].map({
+ ts[":A"]: ts["/:Chunk"],
+ ts[":DocA"]: ts["/:Document"],
+ })
+ ts[":BMember"].map({
+ ts[":B"]: ts["/:Chunk"],
+ ts[":DocB"]: ts["/:Document"],
+ })
+ ts[":AreRelated"].map({
+ ts[":DocA"]: ts[":Doc1Relation"],
+ ts[":DocB"]: ts[":Doc2Relation"],
+ })
+ with ts.scope(":NoMap:Insert"):
+ ts[":AreSame"].map({
+ exists[":A"]: ts["/:SameWord"],
+ exists[":B"]: ts["/:SameWord"],
+ })
+ # Some of these may be unnecessary, they seem to slow things down as
+ # well. For the transformation ones we don't need relations within a
+ # doc.
+ ts[":??"].map({
+ ts[":_"]: ts["/RULE"],
+ exists[":AMember"]: ts["/MAYBE="],
+ exists[":BMember"]: ts["/MAYBE="],
+ exists[":AreRelated"]: ts["/MAYBE="],
+ })
+ ts[":??"].map({
+ ts[":_"]: ts["/RULE"],
+ exists[":DocA"]: ts["/MAYBE="],
+ exists[":DocB"]: ts["/MAYBE="],
+ })
+ ts[":??"].map({
+ ts[":_"]: ts["/RULE"],
+ exists[":Doc1Relation"]: ts["/MAYBE="],
+ exists[":Doc2Relation"]: ts["/MAYBE="],
+ })
+ RegisterRule(ts)
+ with ts.scope("/:Rules:SolveWord"):
+ with ts.scope(":MustMap") as exists:
+ ts[":MA"].map({ts[":A"]: ts[":Word"]})
+ ts[":IsWord"].map({ts[":Word"]: ts["/:Word"]})
+ ts[":AreSame"].map({
+ ts[":A"]: ts["/:SameWord"],
+ ts[":B"]: ts["/:SameWord"],
+ })
+ with ts.scope(":NoMap:Insert"):
+ ts[":MB"].map({exists[":B"]: exists[":Word"]})
+ RegisterRule(ts)
+
+ """Crossdoc tagger.
+
+ The idea here is to find chunks ChunkA in document A and ChunkB in document
+ B which are currently mapped together. We then want to find documents A',
+ B' with chunks ChunkA' and ChunkB' such that A and A' are related in the
+ same way as B and B', and ChunkA and ChunkA' are related in the same ways
+ as ChunkB and ChunkB'.
+
+ Furthermore, we don't want there to be any _other_ ChunkA', ChunkB' which
+ are related in that way. I don't think it's actually possible to express
+ that as a TS rule using the current set up, so we'll enforce that later in
+ the Python tactic.
+ """
+ mapper = ts.scope("/:Mapper", protect=True)
+ with ts.scope("/:Rules:TagCrossDoc"):
+ with ts.scope(":MustMap") as exists:
+ for x in ("A", "B"):
+ ts[f":IsAbstraction{x}"].map({
+ ts[f":MAlpha{x}"]: ts[mapper[":Abstraction"]]
+ })
+ for x in ("A", "A'", "B", "B'"):
+ ts[f":Doc{x}IsDoc"].map({
+ ts[f":Doc{x}"]: ts["/:Document"],
+ ts[f":Chunk{x}"]: ts["/:Chunk"],
+ })
+ for x in ("A", "B"):
+ ts[f":DocMap{x}"].map({
+ ts[f":Doc{x}"]: ts[":DocType"],
+ ts[f":Doc{x}'"]: ts[":Doc'Type"],
+ })
+ ts[f":ChunkMap{x}"].map({
+ ts[f":Chunk{x}"]: ts[":ChunkType"],
+ ts[f":Chunk{x}'"]: ts[":Chunk'Type"],
+ })
+ ts[f":MAlpha{x}"].map({
+ ts[f":Doc{x}"]: ts[":AlphaDoc"],
+ ts[f":Doc{x}'"]: ts[":AlphaDoc'"],
+ ts[f":Chunk{x}"]: ts[":AlphaChunk"],
+ })
+ RegisterRule(ts)
+ equivalence_classes = [
+ [":ChunkType", ":Chunk'Type"],
+ [":DocType", ":Doc'Type"],
+ [":DocA", ":DocB", ":DocA'", ":DocB'"],
+ [":DocMapA", ":DocMapB"],
+ [":DocAIsDoc", ":DocBIsDoc", ":DocA'IsDoc", ":DocB'IsDoc"],
+ [":ChunkA", ":ChunkB", ":ChunkA'", ":ChunkB'"],
+ ]
+ for equivalence_class in equivalence_classes:
+ equivalence_class = [exists[name] for name in equivalence_class]
+ AssertNodesEqual(ts, equivalence_class, "", equal_type="/MAYBE=")
diff --git a/examples/program_analysis/api_migration.py b/examples/program_analysis/api_migration.py
new file mode 100644
index 0000000..ddc3245
--- /dev/null
+++ b/examples/program_analysis/api_migration.py
@@ -0,0 +1,72 @@
+"""Learning to generalize an API migration."""
+import random
+import os
+from timeit import default_timer as timer
+from ui import serve
+from lazy_structure import LazyStructure
+from analyzelib import LoadDocument, AnalyzeCodelets, CompleteAnalogyTactic
+
+def Main():
+ """Runs the analogy-maker and displays the output."""
+ start = timer()
+ print("Setting up the structure...")
+ random.seed(24)
+ demo_path = os.environ.get("BUILD_WORKSPACE_DIRECTORY", ".")
+ demo_path += "/examples/program_analysis/paper_demos"
+ chunks = [(0, 7), (9, 5)]
+ sources = []
+ for i in range(1, 4):
+ before = LoadDocument(f"{demo_path}/api{i}.before.txt", chunks)
+ after = LoadDocument(f"{demo_path}/api{i}.after.txt",
+ chunks if i != 3 else [])
+ sources.append((before, after))
+
+ chunks = []
+ docs_before = LoadDocument(f"{demo_path}/docs.before.txt")
+ docs_after = LoadDocument(f"{demo_path}/docs.after.txt")
+
+ structure = LazyStructure(
+ [source for sourcelist in sources for source in sourcelist]
+ + [docs_before, docs_after], AnalyzeCodelets)
+
+ for document in structure.documents[:-2]:
+ for chunk in document.chunks:
+ structure.ChunkToNode(document, chunk)
+
+ def insertChunks(doc, chunk_texts):
+ for chunk in doc.chunks:
+ if doc.ChunkWord(chunk) in chunk_texts:
+ structure.ChunkToNode(doc, chunk)
+
+ doc_words = set({
+ "cam_record_video", "cam_record_audio", "cam_record_frame",
+ "On", "error", "failure", "returns",
+ "-1", "-2", "-3", "-4", "-5", "-6",
+ })
+ insertChunks(docs_before, doc_words)
+ insertChunks(docs_after, doc_words)
+
+ for i in range(0, 8, 2):
+ structure.AnnotateDocuments(dict({
+ i: "/:TransformPair:Before",
+ (i + 1): "/:TransformPair:After",
+ }))
+ if i != 6:
+ structure.AnnotateDocuments(dict({
+ i: "/:Documentation:Code",
+ 6: "/:Documentation:Docs",
+ }))
+ structure.AnnotateDocuments(dict({
+ (i + 1): "/:Documentation:Code",
+ 7: "/:Documentation:Docs",
+ }))
+
+ structure.MarkDocumentGenerated(5)
+ CompleteAnalogyTactic(structure, sources)
+ structure.GetGeneratedDocument(5)
+
+ print(timer() - start)
+ serve.start_server(structure)
+
+if __name__ == "__main__":
+ Main()
diff --git a/examples/program_analysis/lazy_structure.py b/examples/program_analysis/lazy_structure.py
new file mode 100644
index 0000000..682767a
--- /dev/null
+++ b/examples/program_analysis/lazy_structure.py
@@ -0,0 +1,305 @@
+"""Helper methods for encoding text documents in Triplet Structures.
+
+Here I will use:
+ 1. 'Word' to refer to an *abstract word*, such as 'hello'.
+ 2. 'Chunk' to refer to an instance of a word in a document.
+ 3. 'Node' or 'Symbol' refers to a node in the structure.
+
+Currently only implementing 'flat' reads (i.e., no ASTs), but the goal is to
+have the interface simple enough to support ASTs in a straight-forward way.
+"""
+from ts_lib import TripletStructure
+from runtime.runtime import TSRuntime
+
+SPECIAL_CHARACTERS = [
+ "(", ")", "[", "]", "{", "}", ".", ";", "*", "/", "+",
+ "&", '"', ",", "`", "\n",
+]
+
+class LazyStructure:
+ """Structure representing multiple LazyDocuments.
+ """
+ def __init__(self, documents, codelet):
+ """Initialize a LazyStructure given a collection of LazyDocuments.
+
+ @codelet(ts) is a callback that should initialize the TripletStructure.
+ """
+ self.ts = TripletStructure()
+
+ self.documents = documents
+ self.document_scopes = dict({
+ document: self.ts.scope(f"/:Documents:{i}")
+ for i, document in enumerate(documents)
+ })
+
+ # Set-union trick from: https://stackoverflow.com/questions/30773911
+ self.words = sorted(set().union(*(doc.words for doc in documents)))
+ # Maps words to symbol names in the structure.
+ self.dictionary = dict({
+ word: self.ts[f"/:Dictionary:{i}"]
+ for i, word in enumerate(self.words)
+ })
+
+ for symbol in self.dictionary.values():
+ self.ts[":IsWordMap"].map({symbol: self.ts["/:Word"]})
+
+ for scope in self.document_scopes.values():
+ scope[":_IsMember"].map({scope[":_"]: self.ts["/:Document"]})
+
+ self.ts.add_node("/:Chunk")
+
+ for document in self.documents:
+ document.InitializeWorkspace(self.ts)
+
+ codelet(self.ts)
+ self.rt = TSRuntime(self.ts)
+
+ def ChunkToNode(self, document, chunk):
+ """Adds a chunk explicitly into the workspace, if it's not already."""
+ assert chunk in document.chunks
+ scope = self.document_scopes[document]
+ chunk_node = document.ChunkToNode(chunk, self, scope)
+ if chunk_node is not None:
+ scope[":_IsMember"].map({chunk_node: self.ts["/:Chunk"]})
+ self.ts.commit(False)
+
+ def NodeOfChunk(self, document, chunk):
+ """Find the node in the workspace corresponding to a chunk."""
+ scope = self.document_scopes[document]
+ return document.NodeOfChunk(scope, chunk)
+
+ def GetGeneratedDocument(self, index):
+ """Parses a document (created by Sifter) out of the workspace.
+
+ Specifically, it updates self.documents[index] to point to a
+ LazyGeneratedTextDocument that describes the textual contents parsed
+ from the workspace. Used after Sifter completes an analogy to get the
+ corresponding code.
+ """
+ old_document = self.documents[index]
+ document = LazyGeneratedTextDocument(self, index)
+ self.documents[index] = document
+ self.document_scopes[document] = self.document_scopes[old_document]
+ self.document_scopes.pop(old_document)
+
+ def AnnotateDocuments(self, fact_map):
+ """Annotates the document.
+
+ @fact_map should be a map dict({doc_index: node}).
+ """
+ fact_node = self.ts["/:DocumentAnnotations:??"]
+ for doc_id, annotation in fact_map.items():
+ fact_node.map({
+ self.ts[f"/:Documents:{doc_id}:_"]: self.ts[annotation],
+ })
+
+ def MarkDocumentGenerated(self, index):
+ """Indicates that the @index document should be generated by Sifter."""
+ self.ts[f"/:DocumentAnnotations:??"].map({
+ self.ts[f"/:Documents:{index}:_"]: self.ts["/:Mapper:TOP"],
+ self.ts[f"/:Documents:{index}:_IsMember"]: self.ts["/:Mapper:TOP"],
+ })
+
+class LazyTextDocument:
+ """Represents a single text document."""
+ def __init__(self, text, special=None):
+ """Initializes the LazyTextDocument, including tokenization.
+
+ @special can contain a list of document-specific tokens.
+ """
+ self.text = text
+ # [(start, length)]
+ self.chunks = self.ChunkText(text, special)
+ self.words = set(map(self.ChunkWord, self.chunks))
+ self.annotations = []
+
+ def AnnotateChunks(self, fact_map):
+ """Annotates the document.
+
+ - @fact_map should be a map dict({chunk: node}).
+ Each node referenced in the map will be created when the structure is
+ initialized. The corresponding fact node will be created at runtime
+ when the first referenced chunk is created. Only supports one type per
+ concrete.
+ """
+ self.annotations.append(fact_map)
+
+ def InitializeWorkspace(self, ts):
+ """Add an initial set of facts to the workspace."""
+ for node in set({"/:Follower:Before", "/:Follower:After"}):
+ ts.add_node(node)
+ self.annotations = [
+ dict({key: ts[value] for key, value in fact_map.items()})
+ for fact_map in self.annotations]
+
+ def ChunkToNode(self, chunk, structure, scope):
+ """Returns a delta adding @chunk to @structure.ts.
+
+ Also returns the NodeWrapper corresponding to the chunk.
+ """
+ ts = structure.ts
+ chunk_start, _ = chunk
+ local_name = f":Chunks:{chunk_start}"
+ if local_name in scope:
+ return None
+
+ # (1) Add a node for the chunk.
+ chunk_node = scope[local_name]
+ # (2) Assert that it is an instance of the corresponding word.
+ word = structure.dictionary[self.ChunkWord(chunk)]
+ scope[f":IsWord:{chunk_start}"].map({chunk_node: word})
+ # (3) If the immediately-prior or immediately-following chunk is
+ # already in the structure, connect it. TODO(masotoud): Refactor this,
+ # also maybe add partial facts anyways?.
+ chunk_index = self.chunks.index(chunk)
+ if chunk_index > 0:
+ left_start, _ = self.chunks[chunk_index - 1]
+ left_node = scope.protected()[f":Chunks:{left_start}"]
+ if ts.has_node(left_node):
+ scope[f":Following:{left_start}:{chunk_start}"].map({
+ ts[left_node]: ts["/:Follower:Before"],
+ chunk_node: ts["/:Follower:After"],
+ })
+ if (chunk_index + 1) < len(self.chunks):
+ right_start, _ = self.chunks[chunk_index + 1]
+ right_node = scope.protected()[f":Chunks:{right_start}"]
+ if ts.has_node(right_node):
+ scope[f":Following:{right_start}:{chunk_start}"].map({
+ chunk_node: ts["/:Follower:Before"],
+ ts[right_node]: ts["/:Follower:After"],
+ })
+ # (4) If the chunk is annotated, include its annotation.
+ for i, fact_map in enumerate(self.annotations):
+ if chunk in fact_map:
+ scope[f":Annotations:{i}"].map({chunk_node: fact_map[chunk]})
+
+ return chunk_node
+
+ @staticmethod
+ def ChunkText(text, special):
+ """Tokenizes @text based on standard delimiters and @special.
+
+ Returns a list of (start_index, length) pairs. For example,
+ ChunkText("Hi 5+-2", ["-2"])
+ = [(0, 2), (2, 1), (3, 1), (4, 1), (5, 2)]
+ """
+ chunks = []
+ # Read characters from @text until either a space is reached or a
+ # special word.
+ start_chunk = None
+ i = 0
+ def maybe_wrap_chunk():
+ if start_chunk is not None:
+ chunks.append((start_chunk, i - start_chunk))
+ return None
+
+ while i < len(text):
+ if text[i] in (' ', '\n'):
+ start_chunk = maybe_wrap_chunk()
+ i += 1
+ else:
+ try:
+ word = next(word for word in special
+ if text[i:].startswith(word))
+ # If there was an existing chunk we were reading, wrap it
+ # up.
+ start_chunk = maybe_wrap_chunk()
+ # Then this word forms a new chunk.
+ chunks.append((i, len(word)))
+ i += len(word)
+ except StopIteration:
+ if start_chunk is None:
+ start_chunk = i
+ i += 1
+ start_chunk = maybe_wrap_chunk
+ return chunks
+
+ def ChunkWord(self, chunk):
+ """Returns the string corresponding to @chunk=(start_index, length)."""
+ start, length = chunk
+ return self.text[start:(start + length)]
+
+ def NodeOfChunk(self, scope, chunk):
+ """Returns the workspace node corresponding to @chunk."""
+ start, _ = chunk
+ return scope.protected()[f":Chunks:{start}"]
+
+ def FindChunk(self, chunk_word):
+ """Returns the first chunk with its string being @ChunkWord."""
+ for chunk in self.chunks:
+ if self.ChunkWord(chunk) == chunk_word:
+ return chunk
+ return None
+
+ def FindChunks(self, chunk_word):
+ """Returns all chunks with corresponding string being @ChunkWord."""
+ return [chunk for chunk in self.chunks
+ if self.ChunkWord(chunk) == chunk_word]
+
+class LazyGeneratedTextDocument(LazyTextDocument):
+ """Like LazyTextDocument, except the contents are read from the workspace.
+
+ The basic use for this is when Sifter _completes_ an analogy, and so
+ creates new code in the workspace. A LazyGeneratedTextDocument can read
+ that generated code out of the workspace.
+ """
+ def __init__(self, structure, index):
+ """Initialize the LazyGeneratedTextDocument."""
+ self.structure = structure
+ self.scope = structure.ts.scope(f"/:Documents:{index}", protect=True)
+ self.ExtractChunks()
+
+ def ExtractChunks(self):
+ """Parses the nodes in the workspace into a string representation."""
+ ts, scope = self.structure.ts, self.scope
+ chunks = set(
+ fact[1]
+ for fact in ts.lookup(scope[":_IsMember"], None, "/:Chunk"))
+ # (1) Get the poset of the nodes in this document.
+ orders = set()
+ for fact in ts.lookup(None, None, "/:Follower:Before"):
+ map_node, before_node, _ = fact
+ if before_node not in chunks:
+ continue
+ for fact in ts.lookup(map_node, None, "/:Follower:After"):
+ after_node = fact[1]
+ if after_node in chunks:
+ orders.add((before_node, after_node))
+ # (2) Extend the poset into a toset.
+ sorted_chunks = []
+ while chunks:
+ sorted_chunks.append([
+ chunk for chunk in sorted(chunks)
+ if not any(order[1] == chunk for order in orders)])
+ if not sorted_chunks[-1]:
+ sorted_chunks[-1] = sorted(chunks) # Cut loops.
+ chunks = chunks - set(sorted_chunks[-1])
+ orders = set(order for order in orders if set(order) <= chunks)
+ linear_chunks = []
+ for chunk_layer in sorted_chunks:
+ linear_chunks.extend(chunk_layer)
+ # (3) Translate the chunks to words.
+ chunk_words = []
+ for chunk in linear_chunks:
+ for word, node in self.structure.dictionary.items():
+ if ts.lookup(None, chunk, node.full_name):
+ chunk_words.append(word)
+ break
+ else:
+ chunk_words.append(f"[Chunk: {chunk}]")
+ for i, chunk_word in enumerate(chunk_words):
+ if chunk_word[-1] in (";", "{", "}"):
+ chunk_words[i] += "\n"
+ chunk_words[i] = chunk_words[i] + " "
+ self.text = ""
+ self.chunks = []
+ for word in chunk_words:
+ self.chunks.append((len(self.text), len(word)))
+ self.text += word
+ self.chunk_to_node = dict(zip(self.chunks, linear_chunks))
+
+ def ChunkToNode(self, chunk, structure, scope):
+ raise NotImplementedError
+
+ def NodeOfChunk(self, scope, chunk):
+ return self.chunk_to_node[chunk]
diff --git a/examples/program_analysis/paper_demos/api1.after.txt b/examples/program_analysis/paper_demos/api1.after.txt
new file mode 100644
index 0000000..d4f6957
--- /dev/null
+++ b/examples/program_analysis/paper_demos/api1.after.txt
@@ -0,0 +1,14 @@
+#include <stdio.h>
+#include "cam.h"
+
+#define BUFFER_SIZE (1024 * 1024)
+char buffer[BUFFER_SIZE];
+
+void try_record_video() {
+ int result = cam_record_video(buffer, BUFFER_SIZE);
+ if (result == -4) {
+ printf("Could not record video.\n");
+ } else {
+ printf("Recording video worked!\n");
+ }
+}
diff --git a/examples/program_analysis/paper_demos/api1.before.txt b/examples/program_analysis/paper_demos/api1.before.txt
new file mode 100644
index 0000000..0703da3
--- /dev/null
+++ b/examples/program_analysis/paper_demos/api1.before.txt
@@ -0,0 +1,14 @@
+#include <stdio.h>
+#include "cam.h"
+
+#define BUFFER_SIZE (1024 * 1024)
+char buffer[BUFFER_SIZE];
+
+void try_record_video() {
+ int result = cam_record_video(buffer, BUFFER_SIZE, RES_AUTO);
+ if (result == -1) {
+ printf("Could not record video.\n");
+ } else {
+ printf("Recording video worked!\n");
+ }
+}
diff --git a/examples/program_analysis/paper_demos/api2.after.txt b/examples/program_analysis/paper_demos/api2.after.txt
new file mode 100644
index 0000000..340e681
--- /dev/null
+++ b/examples/program_analysis/paper_demos/api2.after.txt
@@ -0,0 +1,14 @@
+#include <stdio.h>
+#include "cam.h"
+
+#define BUFFER_SIZE (1024 * 1024)
+char buffer[BUFFER_SIZE];
+
+void try_record_audio() {
+ int result = cam_record_audio(buffer, BUFFER_SIZE);
+ if (result == -2) {
+ printf("Could not record audio.\n");
+ } else {
+ printf("Recorded audio!\n");
+ }
+}
diff --git a/examples/program_analysis/paper_demos/api2.before.txt b/examples/program_analysis/paper_demos/api2.before.txt
new file mode 100644
index 0000000..f74c538
--- /dev/null
+++ b/examples/program_analysis/paper_demos/api2.before.txt
@@ -0,0 +1,14 @@
+#include <stdio.h>
+#include "cam.h"
+
+#define BUFFER_SIZE (1024 * 1024)
+char buffer[BUFFER_SIZE];
+
+void try_record_audio() {
+ int result = cam_record_audio(buffer, BUFFER_SIZE, RES_AUTO);
+ if (result == -5) {
+ printf("Could not record audio.\n");
+ } else {
+ printf("Recorded audio!\n");
+ }
+}
diff --git a/examples/program_analysis/paper_demos/api3.after.txt b/examples/program_analysis/paper_demos/api3.after.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/examples/program_analysis/paper_demos/api3.after.txt
diff --git a/examples/program_analysis/paper_demos/api3.before.txt b/examples/program_analysis/paper_demos/api3.before.txt
new file mode 100644
index 0000000..cb8d731
--- /dev/null
+++ b/examples/program_analysis/paper_demos/api3.before.txt
@@ -0,0 +1,14 @@
+#include <stdio.h>
+#include "cam.h"
+
+#define BUFFER_SIZE (1024)
+char buffer[BUFFER_SIZE];
+
+void try_record_still() {
+ int result = cam_record_frame(buffer, BUFFER_SIZE, RES_AUTO);
+ if (result == -3) {
+ printf("Could not record audio.\n");
+ } else {
+ printf("Recorded audio!\n");
+ }
+}
diff --git a/examples/program_analysis/paper_demos/bash.txt b/examples/program_analysis/paper_demos/bash.txt
new file mode 100644
index 0000000..083401c
--- /dev/null
+++ b/examples/program_analysis/paper_demos/bash.txt
@@ -0,0 +1,17 @@
+int cd_builtin (list) WORD_LIST *list; /*@\color{red}{//B1} @*/
+{
+ char *dirname, *cdpath, *path, *temp;
+...
+
+struct builtin static_shell_builtins[] = {
+...
+ { "cd", cd_builtin, ... }, /*@\color{red}{//B2} @*/
+...
+struct builtin *shell_builtins = static_shell_builtins; /*@\color{red}{//B3} @*/
+
+struct builtin * builtin_address_internal
+ (name, disabled_okay)
+ char *name; int disabled_okay; { /*@\color{red}{//B4} @*/
+...
+ j = shell_builtins[mid].name[0] - name[0];
+...
diff --git a/examples/program_analysis/paper_demos/docs.after.txt b/examples/program_analysis/paper_demos/docs.after.txt
new file mode 100644
index 0000000..e162420
--- /dev/null
+++ b/examples/program_analysis/paper_demos/docs.after.txt
@@ -0,0 +1,9 @@
+# CameraLib v2.0
+### `cam_record_video(buffer, buffer_size)`
+Records video from the main camera into `buffer` until `buffer_size` bytes are reached. On error returns -4.
+
+### `cam_record_audio(buffer, buffer_size)`
+Uses the main camera's microphone to record audio into `buffer` until `buffer_size` bytes have been recorded. On error returns -2.
+
+### `cam_record_frame(buffer, buffer_size)`
+Uses the main camera to record a single image to `buffer`. Automatically sets the resolution to fit in `buffer_size`. On failure returns -6.
diff --git a/examples/program_analysis/paper_demos/docs.before.txt b/examples/program_analysis/paper_demos/docs.before.txt
new file mode 100644
index 0000000..1f10feb
--- /dev/null
+++ b/examples/program_analysis/paper_demos/docs.before.txt
@@ -0,0 +1,9 @@
+# CameraLib v1.0
+### `cam_record_video(buffer, buffer_size, resolution)`
+Records video from the main camera into `buffer` until `buffer_size` bytes are reached. On error returns -1.
+
+### `cam_record_audio(buffer, buffer_size, resolution)`
+Uses the main camera's microphone to record audio into `buffer` until `buffer_size` bytes have been recorded. On error returns -5.
+
+### `cam_record_frame(buffer, buffer_size, resolution)`
+Uses the main camera to record a single image to `buffer`. Automatically sets the resolution to fit in `buffer_size`. On failure returns -3.
diff --git a/examples/program_analysis/paper_demos/fish.txt b/examples/program_analysis/paper_demos/fish.txt
new file mode 100644
index 0000000..2b933f0
--- /dev/null
+++ b/examples/program_analysis/paper_demos/fish.txt
@@ -0,0 +1,13 @@
+int builtin_cd(parser_t &parser, io_streams_t &streams, wchar_t **argv) { /*@\color{red}{//F1} @*/
+ const wchar_t *cmd = argv[0];
+ int argc = builtin_count_args(argv);
+...
+
+static const builtin_data_t builtin_datas[] = {
+...
+ {L"cd", &builtin_cd, ...}, /*@\color{red}{//F2} @*/
+...
+
+static const builtin_data_t *builtin_lookup(const wcstring &name) { /*@\color{red}{//F3} @*/
+ const builtin_data_t *array_end = builtin_datas + BUILTIN_COUNT;
+...
diff --git a/examples/program_analysis/paper_demos/gemm1.after.txt b/examples/program_analysis/paper_demos/gemm1.after.txt
new file mode 100644
index 0000000..5d49a4c
--- /dev/null
+++ b/examples/program_analysis/paper_demos/gemm1.after.txt
@@ -0,0 +1,6 @@
+assert(k > 0);
+int outer = k * 100;
+int inner = k * 10;
+read_mat(outer, inner, &A);
+read_mat(inner, outer, &B);
+gemm_skinny(A, B, &C, outer, inner, outer);
diff --git a/examples/program_analysis/paper_demos/gemm1.before.txt b/examples/program_analysis/paper_demos/gemm1.before.txt
new file mode 100644
index 0000000..5ff9d2b
--- /dev/null
+++ b/examples/program_analysis/paper_demos/gemm1.before.txt
@@ -0,0 +1,6 @@
+assert(k > 0);
+int outer = k * 100;
+int inner = k * 10;
+read_mat(outer, inner, &A);
+read_mat(inner, outer, &B);
+gemm_large(A, B, &C, outer, inner, outer);
diff --git a/examples/program_analysis/paper_demos/gemm2.after.txt b/examples/program_analysis/paper_demos/gemm2.after.txt
new file mode 100644
index 0000000..19bb8aa
--- /dev/null
+++ b/examples/program_analysis/paper_demos/gemm2.after.txt
@@ -0,0 +1,10 @@
+assert(k > 1);
+int outer = k, A_cols = k / 2;
+read_mat(outer, A_cols, &A);
+read_mat(A_cols, outer, &B);
+while (!done(A, B)) {
+ read_row(&A);
+ read_col(&B);
+ outer++;
+}
+gemm_skinny(A, B, &C, outer, A_cols, outer);
diff --git a/examples/program_analysis/paper_demos/gemm2.before.txt b/examples/program_analysis/paper_demos/gemm2.before.txt
new file mode 100644
index 0000000..6e0ab1c
--- /dev/null
+++ b/examples/program_analysis/paper_demos/gemm2.before.txt
@@ -0,0 +1,10 @@
+assert(k > 1);
+int outer = k, A_cols = k / 2;
+read_mat(outer, A_cols, &A);
+read_mat(A_cols, outer, &B);
+while (!done(A, B)) {
+ read_row(&A);
+ read_col(&B);
+ outer++;
+}
+gemm_large(A, B, &C, outer, A_cols, outer);
diff --git a/examples/program_analysis/paper_demos/gemm3.after.txt b/examples/program_analysis/paper_demos/gemm3.after.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/examples/program_analysis/paper_demos/gemm3.after.txt
diff --git a/examples/program_analysis/paper_demos/gemm3.before.txt b/examples/program_analysis/paper_demos/gemm3.before.txt
new file mode 100644
index 0000000..3cd1072
--- /dev/null
+++ b/examples/program_analysis/paper_demos/gemm3.before.txt
@@ -0,0 +1,6 @@
+assert(k > 5);
+int AB_rowcol = k;
+int inner = k * k;
+read_mat(AB_rowcol, inner, &A);
+read_mat(inner, AB_rowcol, &B);
+gemm_large(A, B, &C, AB_rowcol, inner, AB_rowcol);
diff --git a/examples/program_analysis/paper_demos/gemm4.bad.txt b/examples/program_analysis/paper_demos/gemm4.bad.txt
new file mode 100644
index 0000000..a57dc33
--- /dev/null
+++ b/examples/program_analysis/paper_demos/gemm4.bad.txt
@@ -0,0 +1,6 @@
+assert(k > 0);
+int outer = k * 10;
+int inner = k * 10;
+read_mat(outer, inner, &A);
+read_mat(inner, outer, &B);
+gemm_large(A, B, &C);
diff --git a/examples/program_analysis/program_understanding.py b/examples/program_analysis/program_understanding.py
new file mode 100644
index 0000000..2b1e141
--- /dev/null
+++ b/examples/program_analysis/program_understanding.py
@@ -0,0 +1,72 @@
+"""Comparative program understanding between shell implementation snippets."""
+import random
+import os
+from timeit import default_timer as timer
+from tactic_utils import Fix, ApplyRulesMatching
+from ui import serve
+from lazy_structure import LazyStructure
+from analyzelib import LoadDocument, AnalyzeCodelets
+from analogy_utils import Analogy
+
+def Main():
+ """Runs the analogy-maker and displays the output."""
+ start = timer()
+ print("Setting up the structure...")
+ random.seed(24)
+ demo_path = os.environ.get("BUILD_WORKSPACE_DIRECTORY", ".")
+ demo_path += "/examples/program_analysis/paper_demos"
+
+ extra_special = [
+ "static_shell_builtins", "shell_builtins", "builtin_address_internal",
+ "builtin_datas", "builtin_lookup", "_builtin", "builtin_"]
+ bash = LoadDocument(f"{demo_path}/bash.txt", extra_special=extra_special)
+ fish = LoadDocument(f"{demo_path}/fish.txt", extra_special=extra_special)
+
+ # These could be provided, eg., by an AST, or we could even have Sifter
+ # rewrite rules which identify them. We can also make each the membor of
+ # its sub-struct then make the sub-struct a member of the parent struct.
+ bash.AnnotateChunks(dict({
+ bash.FindChunks("shell_builtins")[0]: "/:Semantics:Collection",
+ bash.FindChunks("cd")[2]: "/:Semantics:CollectionMember",
+ }))
+ fish.AnnotateChunks(dict({
+ fish.FindChunks("builtin_datas")[0]: "/:Semantics:Collection",
+ fish.FindChunks("cd")[2]: "/:Semantics:CollectionMember",
+ }))
+ bash.AnnotateChunks(dict({
+ bash.FindChunks("shell_builtins")[-1]: "/:Semantics:FunctionBody",
+ bash.FindChunks("builtin_address_internal")[-1]: "/:Semantics:Function",
+ }))
+ fish.AnnotateChunks(dict({
+ fish.FindChunks("builtin_datas")[-1]: "/:Semantics:FunctionBody",
+ fish.FindChunks("builtin_lookup")[-1]: "/:Semantics:Function",
+ }))
+
+ structure = LazyStructure([bash, fish], AnalyzeCodelets)
+ for document in structure.documents:
+ for chunk in document.chunks:
+ structure.ChunkToNode(document, chunk)
+
+ print("Identifying word pairs...")
+ for word in ["cd", "shell_builtins", "builtin_datas"]:
+ ApplyRulesMatching(structure.rt, "SameWord", dict({
+ "/:Rules:SameWord:MustMap:Word": structure.dictionary[word].full_name,
+ }))
+
+ print("Finding the analogy...")
+ analogy = Analogy.Begin(structure.rt, dict({
+ "/:Mapper:NoSlipRules:A":
+ structure.NodeOfChunk(bash, bash.FindChunk("cd")),
+ "/:Mapper:NoSlipRules:B":
+ structure.NodeOfChunk(fish, fish.FindChunk("cd")),
+ }), extend_here=False)
+
+ Fix(analogy.ExtendMap,
+ ["/:Semantics:CollectionMember", "/:Semantics:Function",
+ "/:Semantics:FunctionBody", "/:SameWord"])
+
+ print(timer() - start)
+ serve.start_server(structure)
+
+if __name__ == "__main__":
+ Main()
diff --git a/examples/program_analysis/transform_learning.py b/examples/program_analysis/transform_learning.py
new file mode 100644
index 0000000..a790d84
--- /dev/null
+++ b/examples/program_analysis/transform_learning.py
@@ -0,0 +1,61 @@
+"""Learning to generalize a program optimization."""
+import random
+import os
+from timeit import default_timer as timer
+from ui import serve
+from lazy_structure import LazyStructure
+from analyzelib import LoadDocument, AnalyzeCodelets, CompleteAnalogyTactic
+
+def Main():
+ """Runs the analogy-maker and displays the output."""
+ start = timer()
+ print("Setting up the structure...")
+ random.seed(24)
+ demo_path = os.environ.get("BUILD_WORKSPACE_DIRECTORY", ".")
+ demo_path += "/examples/program_analysis/paper_demos"
+ sources = []
+ for i in range(1, 4):
+ with open(f"{demo_path}/gemm{i}.before.txt", "r") as peek:
+ n_lines = len(peek.readlines())
+ chunks = [(0, n_lines - 1)]
+ before = LoadDocument(f"{demo_path}/gemm{i}.before.txt", chunks)
+ after = LoadDocument(f"{demo_path}/gemm{i}.after.txt", chunks)
+ sources.append((before, after))
+
+ def annotateGTHalf(doc, outer_name, inner_name):
+ outer_chunks = list(doc.FindChunks(outer_name))[-2:]
+ inner_chunk = list(doc.FindChunks(inner_name))[-1]
+ for outer_chunk in outer_chunks:
+ doc.AnnotateChunks(dict({
+ outer_chunk: "/:Semantics:Greater",
+ inner_chunk: "/:Semantics:LTHalf",
+ }))
+
+ # This could be provided, eg., by an abstract interpreter.
+ annotateGTHalf(sources[0][0], "outer", "inner")
+ annotateGTHalf(sources[1][0], "outer", "A_cols")
+ annotateGTHalf(sources[2][0], "AB_rowcol", "inner")
+
+ structure = LazyStructure(
+ [source for sourcelist in sources for source in sourcelist],
+ AnalyzeCodelets)
+
+ for document in structure.documents:
+ for chunk in document.chunks:
+ structure.ChunkToNode(document, chunk)
+
+ for i in range(0, 6, 2):
+ structure.AnnotateDocuments(dict({
+ i: "/:TransformPair:Before",
+ (i + 1): "/:TransformPair:After",
+ }))
+
+ structure.MarkDocumentGenerated(5)
+ CompleteAnalogyTactic(structure, sources)
+ structure.GetGeneratedDocument(5)
+
+ print(timer() - start)
+ serve.start_server(structure)
+
+if __name__ == "__main__":
+ Main()
diff --git a/examples/program_analysis/ui/BUILD b/examples/program_analysis/ui/BUILD
new file mode 100644
index 0000000..9d28957
--- /dev/null
+++ b/examples/program_analysis/ui/BUILD
@@ -0,0 +1,12 @@
+py_library(
+ name = "serve",
+ srcs = ["serve.py"],
+ visibility = ["//:__subpackages__"],
+ deps = [":lazy_structure_parser"],
+)
+
+py_library(
+ name = "lazy_structure_parser",
+ srcs = ["lazy_structure_parser.py"],
+ visibility = ["//:__subpackages__"],
+)
diff --git a/examples/program_analysis/ui/index.html b/examples/program_analysis/ui/index.html
new file mode 100644
index 0000000..82cc511
--- /dev/null
+++ b/examples/program_analysis/ui/index.html
@@ -0,0 +1,13 @@
+<!DOCTYPE html>
+<html>
+ <head>
+ <title>Sifter</title>
+ <script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.5.0/jquery.min.js" integrity="sha256-xNzN2a4ltkB44Mc/Jz3pT4iU1cmeR0FkXs4pru/JxaQ=" crossorigin="anonymous"></script>
+ <script src="leader-line.min.js"></script>
+ <script src="plain-draggable.min.js"></script>
+ <link href="style.css" type="text/css" rel="stylesheet"></link>
+ </head>
+ <body>
+ <script src="index.js"></script>
+ </body>
+</html>
diff --git a/examples/program_analysis/ui/index.js b/examples/program_analysis/ui/index.js
new file mode 100644
index 0000000..4f4c49c
--- /dev/null
+++ b/examples/program_analysis/ui/index.js
@@ -0,0 +1,74 @@
+var base = "http://127.0.0.1:8001"
+
+var friends = new Map();
+var chunk_containers = new Map();
+function load_structure(structure) {
+ console.log(structure);
+ var documents = structure["documents"];
+ for (var i = 0; i < documents.length; i++) {
+ var document_container = $("<div>").appendTo($("body"));
+ console.log(documents[i]);
+ var chunks = documents[i].chunks;
+ for (var j = 0; j < chunks.length; j++) {
+ var chunk_container = $("<span>").appendTo(document_container);
+ var text = documents[i].text.substr(chunks[j][1], chunks[j][2]);
+ chunk_container.text(text);
+ if (chunks[j][0] != false) {
+ chunk_container.addClass("chunk-in-structure");
+ chunk_container.attr("data-chunk-gid", chunks[j][0]);
+ chunk_containers.set(chunks[j][0], chunk_container);
+ } else {
+ chunk_container.attr("data-chunk-gid", "");
+ }
+ }
+ document_container.addClass("document");
+ if (documents[i].generated) {
+ document_container.addClass("generated");
+ }
+ var dragger = new PlainDraggable(document_container.get()[0]);
+ }
+ var maps = structure["maps"];
+ for (var i = 0; i < maps.length; i++) {
+ var map = maps[i];
+ for (var j = 0; j < map.length; j++) {
+ if (!friends.has(map[j])) {
+ friends.set(map[j], []);
+ }
+ chunk_containers.get(map[j]).addClass("chunk-in-map");
+ for (var k = 0; k < map.length; k++) {
+ friends.get(map[j]).push(map[k]);
+ }
+ }
+ }
+ console.log(chunk_containers);
+}
+
+$("body").on("mouseover", ".chunk-in-structure", function() {
+ if ($(this).attr("data-chunk-gid") == "") {
+ return;
+ }
+ var my_friends = friends.get($(this).attr("data-chunk-gid"));
+ if (my_friends === undefined) {
+ return;
+ }
+ for (var i = 0; i < my_friends.length; i++) {
+ chunk_containers.get(my_friends[i]).addClass("highlight");
+ }
+});
+
+$("body").on("mouseout", ".chunk-in-structure", function() {
+ if ($(this).attr("data-chunk-gid") == "") {
+ return;
+ }
+ var my_friends = friends.get($(this).attr("data-chunk-gid"));
+ if (my_friends === undefined) {
+ return;
+ }
+ for (var i = 0; i < my_friends.length; i++) {
+ chunk_containers.get(my_friends[i]).removeClass("highlight");
+ }
+});
+
+$.get(base + "/Structure", function (data) {
+ load_structure(data);
+});
diff --git a/examples/program_analysis/ui/lazy_structure_parser.py b/examples/program_analysis/ui/lazy_structure_parser.py
new file mode 100644
index 0000000..bae2dea
--- /dev/null
+++ b/examples/program_analysis/ui/lazy_structure_parser.py
@@ -0,0 +1,68 @@
+"""Parse a LazyStructure to extract abstractions for the UI."""
+from collections import defaultdict
+from lazy_structure import LazyGeneratedTextDocument
+
+def parse_lazy_structure(structure):
+ """Returns a dict representation of a LazyStructure.
+
+ Returns a dictionary with
+ {
+ "documents": [
+ {
+ "text": str,
+ "chunks": [(global_id, start, length)],
+ }
+ ],
+ "maps": [
+ [chunk_1_gid, chunk_2_gid, ...]
+ ],
+ }
+
+ maps[i][j] lists all chunks corresponding to the jth node in abstraction i.
+ """
+ ts = structure.ts
+ parsed = dict({"documents": [], "maps": []})
+ # (1) Add the documents.
+ all_chunk_nodes = set()
+ for document in structure.documents:
+ parsed_doc = dict({
+ "text": document.text,
+ "chunks": [],
+ "generated": isinstance(document, LazyGeneratedTextDocument),
+ })
+ for chunk in document.chunks:
+ chunk_node = structure.NodeOfChunk(document, chunk)
+ if ts.has_node(chunk_node):
+ start, length = chunk
+ parsed_doc["chunks"].append((chunk_node, start, length))
+ all_chunk_nodes.add(chunk_node)
+ # Add fake chunks for the rest of the document.
+ parsed_doc["chunks"] = pad_chunks(parsed_doc["chunks"], document)
+ parsed["documents"].append(parsed_doc)
+ # (2) Add the maps.
+ abstract_nodes = defaultdict(set)
+ for fact in ts.lookup(None, None, "/:Mapper:Abstraction"):
+ for other_fact in ts.lookup(fact[1], None, None):
+ if other_fact[1] in all_chunk_nodes:
+ abstract_nodes[other_fact[2]].add(other_fact[1])
+ parsed["maps"] = sorted(map(sorted, abstract_nodes.values()))
+ return parsed
+
+def pad_chunks(chunks, document):
+ """Fills in gaps in @chunks.
+
+ For example, @chunks may only contain chunks of the document which have
+ actually been added to the structure. This iterates over @chunks, and
+ anywhere a gap is found it inserts a new chunk with the node name as False.
+
+ @chunks should be a list of triples (node_name, start, length). The return
+ value is of the same format.
+ """
+ padded = [(False, 0, 0)]
+ chunks = chunks + [(False, len(document.text), 0)]
+ for (global_id, start, length) in chunks:
+ last_end = padded[-1][1] + padded[-1][2]
+ if last_end < start:
+ padded.append((False, last_end, start - last_end))
+ padded.append((global_id, start, length))
+ return padded[1:-1]
diff --git a/examples/program_analysis/ui/leader-line.min.js b/examples/program_analysis/ui/leader-line.min.js
new file mode 100644
index 0000000..81a798e
--- /dev/null
+++ b/examples/program_analysis/ui/leader-line.min.js
@@ -0,0 +1,2 @@
+/*! LeaderLine v1.0.5 (c) anseki https://anseki.github.io/leader-line/ */
+var LeaderLine=function(){"use strict";var te,g,y,S,_,o,t,h,f,p,a,i,l,v="leader-line",M=1,I=2,C=3,L=4,n={top:M,right:I,bottom:C,left:L},A=1,V=2,P=3,N=4,T=5,m={straight:A,arc:V,fluid:P,magnet:N,grid:T},ne="behind",r=v+"-defs",s='<svg xmlns="http://www.w3.org/2000/svg" version="1.1" id="leader-line-defs"><style><![CDATA[.leader-line{position:absolute;overflow:visible!important;pointer-events:none!important;font-size:16px}#leader-line-defs{width:0;height:0;position:absolute;left:0;top:0}.leader-line-line-path{fill:none}.leader-line-mask-bg-rect{fill:#fff}.leader-line-caps-mask-anchor,.leader-line-caps-mask-marker-shape{fill:#000}.leader-line-caps-mask-anchor{stroke:#000}.leader-line-caps-mask-line,.leader-line-plugs-face{stroke:transparent}.leader-line-line-mask-shape{stroke:#fff}.leader-line-line-outline-mask-shape{stroke:#000}.leader-line-plug-mask-shape{fill:#fff;stroke:#000}.leader-line-plug-outline-mask-shape{fill:#000;stroke:#fff}.leader-line-areaAnchor{position:absolute;overflow:visible!important}]]></style><defs><circle id="leader-line-disc" cx="0" cy="0" r="5"/><rect id="leader-line-square" x="-5" y="-5" width="10" height="10"/><polygon id="leader-line-arrow1" points="-8,-8 8,0 -8,8 -5,0"/><polygon id="leader-line-arrow2" points="-4,-8 4,0 -4,8 -7,5 -2,0 -7,-5"/><polygon id="leader-line-arrow3" points="-4,-5 8,0 -4,5"/><g id="leader-line-hand"><path style="fill: #fcfcfc" d="M9.19 11.14h4.75c1.38 0 2.49-1.11 2.49-2.49 0-.51-.15-.98-.41-1.37h1.3c1.38 0 2.49-1.11 2.49-2.49s-1.11-2.53-2.49-2.53h1.02c1.38 0 2.49-1.11 2.49-2.49s-1.11-2.49-2.49-2.49h14.96c1.37 0 2.49-1.11 2.49-2.49s-1.11-2.49-2.49-2.49H16.58C16-9.86 14.28-11.14 9.7-11.14c-4.79 0-6.55 3.42-7.87 4.73H-2.14v13.23h3.68C3.29 9.97 5.47 11.14 9.19 11.14L9.19 11.14Z"/><path style="fill: black" d="M13.95 12c1.85 0 3.35-1.5 3.35-3.35 0-.17-.02-.34-.04-.51h.07c1.85 0 3.35-1.5 3.35-3.35 0-.79-.27-1.51-.72-2.08 1.03-.57 1.74-1.67 1.74-2.93 0-.59-.16-1.15-.43-1.63h12.04c1.85 0 3.35-1.5 3.35-3.35 
0-1.85-1.5-3.35-3.35-3.35H17.2C16.26-10.93 13.91-12 9.7-12 5.36-12 3.22-9.4 1.94-7.84c0 0-.29.33-.5.57-.63 0-3.58 0-3.58 0C-2.61-7.27-3-6.88-3-6.41v13.23c0 .47.39.86.86.86 0 0 2.48 0 3.2 0C2.9 10.73 5.29 12 9.19 12L13.95 12ZM9.19 10.28c-3.46 0-5.33-1.05-6.9-3.87-.15-.27-.44-.44-.75-.44 0 0-1.81 0-2.82 0V-5.55c1.06 0 3.11 0 3.11 0 .25 0 .44-.06.61-.25l.83-.95c1.23-1.49 2.91-3.53 6.43-3.53 3.45 0 4.9.74 5.57 1.72h-4.3c-.48 0-.86.38-.86.86s.39.86.86.86h22.34c.9 0 1.63.73 1.63 1.63 0 .9-.73 1.63-1.63 1.63H15.83c-.48 0-.86.38-.86.86 0 .47.39.86.86.86h2.52c.9 0 1.63.73 1.63 1.63s-.73 1.63-1.63 1.63h-3.12c-.48 0-.86.38-.86.86 0 .47.39.86.86.86h2.11c.88 0 1.63.76 1.63 1.67 0 .9-.73 1.63-1.63 1.63h-3.2c-.48 0-.86.39-.86.86 0 .47.39.86.86.86h1.36c.05.16.09.34.09.51 0 .9-.73 1.63-1.63 1.63C13.95 10.28 9.19 10.28 9.19 10.28Z"/></g><g id="leader-line-crosshair"><path d="M0-78.97c-43.54 0-78.97 35.43-78.97 78.97 0 43.54 35.43 78.97 78.97 78.97s78.97-35.43 78.97-78.97C78.97-43.54 43.55-78.97 0-78.97ZM76.51-1.21h-9.91v-9.11h-2.43v9.11h-11.45c-.64-28.12-23.38-50.86-51.5-51.5V-64.17h9.11V-66.6h-9.11v-9.91C42.46-75.86 75.86-42.45 76.51-1.21ZM-1.21-30.76h-9.11v2.43h9.11V-4.2c-1.44.42-2.57 1.54-2.98 2.98H-28.33v-9.11h-2.43v9.11H-50.29C-49.65-28-27.99-49.65-1.21-50.29V-30.76ZM-30.76 1.21v9.11h2.43v-9.11H-4.2c.42 1.44 1.54 2.57 2.98 2.98v24.13h-9.11v2.43h9.11v19.53C-27.99 49.65-49.65 28-50.29 1.21H-30.76ZM1.22 30.75h9.11v-2.43h-9.11V4.2c1.44-.42 2.56-1.54 2.98-2.98h24.13v9.11h2.43v-9.11h19.53C49.65 28 28 49.65 1.22 50.29V30.75ZM30.76-1.21v-9.11h-2.43v9.11H4.2c-.42-1.44-1.54-2.56-2.98-2.98V-28.33h9.11v-2.43h-9.11V-50.29C28-49.65 49.65-28 50.29-1.21H30.76ZM-1.21-76.51v9.91h-9.11v2.43h9.11v11.45c-28.12.64-50.86 23.38-51.5 51.5H-64.17v-9.11H-66.6v9.11h-9.91C-75.86-42.45-42.45-75.86-1.21-76.51ZM-76.51 1.21h9.91v9.11h2.43v-9.11h11.45c.64 28.12 23.38 50.86 51.5 51.5v11.45h-9.11v2.43h9.11v9.91C-42.45 75.86-75.86 42.45-76.51 1.21ZM1.22 76.51v-9.91h9.11v-2.43h-9.11v-11.45c28.12-.64 50.86-23.38 
51.5-51.5h11.45v9.11h2.43v-9.11h9.91C75.86 42.45 42.45 75.86 1.22 76.51Z"/><path d="M0 83.58-7.1 96 7.1 96Z"/><path d="M0-83.58 7.1-96-7.1-96"/><path d="M83.58 0 96 7.1 96-7.1Z"/><path d="M-83.58 0-96-7.1-96 7.1Z"/></g></defs></svg>',ae={disc:{elmId:"leader-line-disc",noRotate:!0,bBox:{left:-5,top:-5,width:10,height:10,right:5,bottom:5},widthR:2.5,heightR:2.5,bCircle:5,sideLen:5,backLen:5,overhead:0,outlineBase:1,outlineMax:4},square:{elmId:"leader-line-square",noRotate:!0,bBox:{left:-5,top:-5,width:10,height:10,right:5,bottom:5},widthR:2.5,heightR:2.5,bCircle:5,sideLen:5,backLen:5,overhead:0,outlineBase:1,outlineMax:4},arrow1:{elmId:"leader-line-arrow1",bBox:{left:-8,top:-8,width:16,height:16,right:8,bottom:8},widthR:4,heightR:4,bCircle:8,sideLen:8,backLen:8,overhead:8,outlineBase:2,outlineMax:1.5},arrow2:{elmId:"leader-line-arrow2",bBox:{left:-7,top:-8,width:11,height:16,right:4,bottom:8},widthR:2.75,heightR:4,bCircle:8,sideLen:8,backLen:7,overhead:4,outlineBase:1,outlineMax:1.75},arrow3:{elmId:"leader-line-arrow3",bBox:{left:-4,top:-5,width:12,height:10,right:8,bottom:5},widthR:3,heightR:2.5,bCircle:8,sideLen:5,backLen:4,overhead:8,outlineBase:1,outlineMax:2.5},hand:{elmId:"leader-line-hand",bBox:{left:-3,top:-12,width:40,height:24,right:37,bottom:12},widthR:10,heightR:6,bCircle:37,sideLen:12,backLen:3,overhead:37},crosshair:{elmId:"leader-line-crosshair",noRotate:!0,bBox:{left:-96,top:-96,width:192,height:192,right:96,bottom:96},widthR:48,heightR:48,bCircle:96,sideLen:96,backLen:96,overhead:0}},E={behind:ne,disc:"disc",square:"square",arrow1:"arrow1",arrow2:"arrow2",arrow3:"arrow3",hand:"hand",crosshair:"crosshair"},ie={disc:"disc",square:"square",arrow1:"arrow1",arrow2:"arrow2",arrow3:"arrow3",hand:"hand",crosshair:"crosshair"},W=[M,I,C,L],x="auto",oe={x:"left",y:"top",width:"width",height:"height"},B=80,R=4,F=5,G=120,D=8,z=3.75,j=10,H=30,U=.5522847,Z=.25*Math.PI,u=/^\s*(\-?[\d\.]+)\s*(\%)?\s*$/,b="http://www.w3.org/2000/svg",e="-ms-scroll-limit"in 
document.documentElement.style&&"-ms-ime-align"in document.documentElement.style&&!window.navigator.msPointerEnabled,le=!e&&!!document.uniqueID,re="MozAppearance"in document.documentElement.style,se=!(e||re||!window.chrome||!window.CSS),ue=!e&&!le&&!re&&!se&&!window.chrome&&"WebkitAppearance"in document.documentElement.style,he=le||e?.2:.1,pe={path:P,lineColor:"coral",lineSize:4,plugSE:[ne,"arrow1"],plugSizeSE:[1,1],lineOutlineEnabled:!1,lineOutlineColor:"indianred",lineOutlineSize:.25,plugOutlineEnabledSE:[!1,!1],plugOutlineSizeSE:[1,1]},k=(a={}.toString,i={}.hasOwnProperty.toString,l=i.call(Object),function(e){var t,n;return e&&"[object Object]"===a.call(e)&&(!(t=Object.getPrototypeOf(e))||(n=t.hasOwnProperty("constructor")&&t.constructor)&&"function"==typeof n&&i.call(n)===l)}),w=Number.isFinite||function(e){return"number"==typeof e&&window.isFinite(e)},c=function(){var e,x={ease:[.25,.1,.25,1],linear:[0,0,1,1],"ease-in":[.42,0,1,1],"ease-out":[0,0,.58,1],"ease-in-out":[.42,0,.58,1]},b=1e3/60/2,t=window.requestAnimationFrame||window.mozRequestAnimationFrame||window.webkitRequestAnimationFrame||window.msRequestAnimationFrame||function(e){setTimeout(e,b)},n=window.cancelAnimationFrame||window.mozCancelAnimationFrame||window.webkitCancelAnimationFrame||window.msCancelAnimationFrame||function(e){clearTimeout(e)},a=Number.isFinite||function(e){return"number"==typeof e&&window.isFinite(e)},k=[],w=0;function l(){var i=Date.now(),o=!1;e&&(n.call(window,e),e=null),k.forEach(function(e){var t,n,a;if(e.framesStart){if((t=i-e.framesStart)>=e.duration&&e.count&&e.loopsLeft<=1)return a=e.frames[e.lastFrame=e.reverse?0:e.frames.length-1],e.frameCallback(a.value,!0,a.timeRatio,a.outputRatio),void(e.framesStart=null);if(t>e.duration){if(n=Math.floor(t/e.duration),e.count){if(n>=e.loopsLeft)return 
a=e.frames[e.lastFrame=e.reverse?0:e.frames.length-1],e.frameCallback(a.value,!0,a.timeRatio,a.outputRatio),void(e.framesStart=null);e.loopsLeft-=n}e.framesStart+=e.duration*n,t=i-e.framesStart}e.reverse&&(t=e.duration-t),a=e.frames[e.lastFrame=Math.round(t/b)],!1!==e.frameCallback(a.value,!1,a.timeRatio,a.outputRatio)?o=!0:e.framesStart=null}}),o&&(e=t.call(window,l))}function O(e,t){e.framesStart=Date.now(),null!=t&&(e.framesStart-=e.duration*(e.reverse?1-t:t)),e.loopsLeft=e.count,e.lastFrame=null,l()}return{add:function(n,e,t,a,i,o,l){var r,s,u,h,p,c,d,f,y,S,m,g,_,v=++w;function E(e,t){return{value:n(t),timeRatio:e,outputRatio:t}}if("string"==typeof i&&(i=x[i]),n=n||function(){},t<b)s=[E(0,0),E(1,1)];else{if(u=b/t,s=[E(0,0)],0===i[0]&&0===i[1]&&1===i[2]&&1===i[3])for(p=u;p<=1;p+=u)s.push(E(p,p));else for(c=h=(p=u)/10;c<=1;c+=h)void 0,S=(y=(f=c)*f)*f,_=3*(m=1-f)*y,p<=(d={x:(g=3*(m*m)*f)*i[0]+_*i[2]+S,y:g*i[1]+_*i[3]+S}).x&&(s.push(E(d.x,d.y)),p+=u);s.push(E(1,1))}return r={animId:v,frameCallback:e,duration:t,count:a,frames:s,reverse:!!o},k.push(r),!1!==l&&O(r,l),v},remove:function(n){var a;k.some(function(e,t){return e.animId===n&&(a=t,!(e.framesStart=null))})&&k.splice(a,1)},start:function(t,n,a){k.some(function(e){return e.animId===t&&(e.reverse=!!n,O(e,a),!0)})},stop:function(t,n){var a;return k.some(function(e){return e.animId===t&&(n?null!=e.lastFrame&&(a=e.frames[e.lastFrame].timeRatio):(a=(Date.now()-e.framesStart)/e.duration,e.reverse&&(a=1-a),a<0?a=0:1<a&&(a=1)),!(e.framesStart=null))}),a},validTiming:function(t){return"string"==typeof t?x[t]:Array.isArray(t)&&[0,1,2,3].every(function(e){return a(t[e])&&0<=t[e]&&t[e]<=1})?[t[0],t[1],t[2],t[3]]:null}}}(),d=function(e){e.SVGPathElement.prototype.getPathData&&e.SVGPathElement.prototype.setPathData||function(){var 
i={Z:"Z",M:"M",L:"L",C:"C",Q:"Q",A:"A",H:"H",V:"V",S:"S",T:"T",z:"Z",m:"m",l:"l",c:"c",q:"q",a:"a",h:"h",v:"v",s:"s",t:"t"},o=function(e){this._string=e,this._currentIndex=0,this._endIndex=this._string.length,this._prevCommand=null,this._skipOptionalSpaces()},l=-1!==e.navigator.userAgent.indexOf("MSIE ");o.prototype={parseSegment:function(){var e=this._string[this._currentIndex],t=i[e]?i[e]:null;if(null===t){if(null===this._prevCommand)return null;if(null===(t=("+"===e||"-"===e||"."===e||"0"<=e&&e<="9")&&"Z"!==this._prevCommand?"M"===this._prevCommand?"L":"m"===this._prevCommand?"l":this._prevCommand:null))return null}else this._currentIndex+=1;var n=null,a=(this._prevCommand=t).toUpperCase();return"H"===a||"V"===a?n=[this._parseNumber()]:"M"===a||"L"===a||"T"===a?n=[this._parseNumber(),this._parseNumber()]:"S"===a||"Q"===a?n=[this._parseNumber(),this._parseNumber(),this._parseNumber(),this._parseNumber()]:"C"===a?n=[this._parseNumber(),this._parseNumber(),this._parseNumber(),this._parseNumber(),this._parseNumber(),this._parseNumber()]:"A"===a?n=[this._parseNumber(),this._parseNumber(),this._parseNumber(),this._parseArcFlag(),this._parseArcFlag(),this._parseNumber(),this._parseNumber()]:"Z"===a&&(this._skipOptionalSpaces(),n=[]),null===n||0<=n.indexOf(null)?null:{type:t,values:n}},hasMoreData:function(){return this._currentIndex<this._endIndex},peekSegmentType:function(){var e=this._string[this._currentIndex];return i[e]?i[e]:null},initialCommandIsMoveTo:function(){if(!this.hasMoreData())return!0;var e=this.peekSegmentType();return"M"===e||"m"===e},_isCurrentSpace:function(){var e=this._string[this._currentIndex];return e<=" "&&(" "===e||"\n"===e||"\t"===e||"\r"===e||"\f"===e)},_skipOptionalSpaces:function(){for(;this._currentIndex<this._endIndex&&this._isCurrentSpace();)this._currentIndex+=1;return 
this._currentIndex<this._endIndex},_skipOptionalSpacesOrDelimiter:function(){return!(this._currentIndex<this._endIndex&&!this._isCurrentSpace()&&","!==this._string[this._currentIndex])&&(this._skipOptionalSpaces()&&this._currentIndex<this._endIndex&&","===this._string[this._currentIndex]&&(this._currentIndex+=1,this._skipOptionalSpaces()),this._currentIndex<this._endIndex)},_parseNumber:function(){var e=0,t=0,n=1,a=0,i=1,o=1,l=this._currentIndex;if(this._skipOptionalSpaces(),this._currentIndex<this._endIndex&&"+"===this._string[this._currentIndex]?this._currentIndex+=1:this._currentIndex<this._endIndex&&"-"===this._string[this._currentIndex]&&(this._currentIndex+=1,i=-1),this._currentIndex===this._endIndex||(this._string[this._currentIndex]<"0"||"9"<this._string[this._currentIndex])&&"."!==this._string[this._currentIndex])return null;for(var r=this._currentIndex;this._currentIndex<this._endIndex&&"0"<=this._string[this._currentIndex]&&this._string[this._currentIndex]<="9";)this._currentIndex+=1;if(this._currentIndex!==r)for(var s=this._currentIndex-1,u=1;r<=s;)t+=u*(this._string[s]-"0"),s-=1,u*=10;if(this._currentIndex<this._endIndex&&"."===this._string[this._currentIndex]){if(this._currentIndex+=1,this._currentIndex>=this._endIndex||this._string[this._currentIndex]<"0"||"9"<this._string[this._currentIndex])return 
null;for(;this._currentIndex<this._endIndex&&"0"<=this._string[this._currentIndex]&&this._string[this._currentIndex]<="9";)n*=10,a+=(this._string.charAt(this._currentIndex)-"0")/n,this._currentIndex+=1}if(this._currentIndex!==l&&this._currentIndex+1<this._endIndex&&("e"===this._string[this._currentIndex]||"E"===this._string[this._currentIndex])&&"x"!==this._string[this._currentIndex+1]&&"m"!==this._string[this._currentIndex+1]){if(this._currentIndex+=1,"+"===this._string[this._currentIndex]?this._currentIndex+=1:"-"===this._string[this._currentIndex]&&(this._currentIndex+=1,o=-1),this._currentIndex>=this._endIndex||this._string[this._currentIndex]<"0"||"9"<this._string[this._currentIndex])return null;for(;this._currentIndex<this._endIndex&&"0"<=this._string[this._currentIndex]&&this._string[this._currentIndex]<="9";)e*=10,e+=this._string[this._currentIndex]-"0",this._currentIndex+=1}var h=t+a;return h*=i,e&&(h*=Math.pow(10,o*e)),l===this._currentIndex?null:(this._skipOptionalSpacesOrDelimiter(),h)},_parseArcFlag:function(){if(this._currentIndex>=this._endIndex)return null;var e=null,t=this._string[this._currentIndex];if(this._currentIndex+=1,"0"===t)e=0;else{if("1"!==t)return null;e=1}return this._skipOptionalSpacesOrDelimiter(),e}};var a=function(e){if(!e||0===e.length)return[];var t=new o(e),n=[];if(t.initialCommandIsMoveTo())for(;t.hasMoreData();){var a=t.parseSegment();if(null===a)break;n.push(a)}return n},n=e.SVGPathElement.prototype.setAttribute,r=e.SVGPathElement.prototype.removeAttribute,d=e.Symbol?e.Symbol():"__cachedPathData",f=e.Symbol?e.Symbol():"__cachedNormalizedPathData",U=function(e,t,n,a,i,o,l,r,s,u){var h,p,c,d,f,y=function(e,t,n){return{x:e*Math.cos(n)-t*Math.sin(n),y:e*Math.sin(n)+t*Math.cos(n)}},S=(h=l,Math.PI*h/180),m=[];if(u)p=u[0],c=u[1],d=u[2],f=u[3];else{var g=y(e,t,-S);e=g.x,t=g.y;var _=y(n,a,-S),v=(e-(n=_.x))/2,E=(t-(a=_.y))/2,x=v*v/(i*i)+E*E/(o*o);1<x&&(i*=x=Math.sqrt(x),o*=x);var 
b=i*i,k=o*o,w=b*k-b*E*E-k*v*v,O=b*E*E+k*v*v,M=(r===s?-1:1)*Math.sqrt(Math.abs(w/O));d=M*i*E/o+(e+n)/2,f=M*-o*v/i+(t+a)/2,p=Math.asin(parseFloat(((t-f)/o).toFixed(9))),c=Math.asin(parseFloat(((a-f)/o).toFixed(9))),e<d&&(p=Math.PI-p),n<d&&(c=Math.PI-c),p<0&&(p=2*Math.PI+p),c<0&&(c=2*Math.PI+c),s&&c<p&&(p-=2*Math.PI),!s&&p<c&&(c-=2*Math.PI)}var I=c-p;if(Math.abs(I)>120*Math.PI/180){var C=c,L=n,A=a;c=s&&p<c?p+120*Math.PI/180*1:p+120*Math.PI/180*-1,n=d+i*Math.cos(c),a=f+o*Math.sin(c),m=U(n,a,L,A,i,o,l,0,s,[c,C,d,f])}I=c-p;var V=Math.cos(p),P=Math.sin(p),N=Math.cos(c),T=Math.sin(c),W=Math.tan(I/4),B=4/3*i*W,R=4/3*o*W,F=[e,t],G=[e+B*P,t-R*V],D=[n+B*T,a-R*N],z=[n,a];if(G[0]=2*F[0]-G[0],G[1]=2*F[1]-G[1],u)return[G,D,z].concat(m);m=[G,D,z].concat(m).join().split(",");var j=[],H=[];return m.forEach(function(e,t){t%2?H.push(y(m[t-1],m[t],S).y):H.push(y(m[t],m[t+1],S).x),6===H.length&&(j.push(H),H=[])}),j},y=function(e){return e.map(function(e){return{type:e.type,values:Array.prototype.slice.call(e.values)}})},S=function(e){var S=[],m=null,g=null,_=null,v=null,E=null,x=null,b=null;return e.forEach(function(e){if("M"===e.type){var t=e.values[0],n=e.values[1];S.push({type:"M",values:[t,n]}),v=x=t,E=b=n}else if("C"===e.type){var a=e.values[0],i=e.values[1],o=e.values[2],l=e.values[3];t=e.values[4],n=e.values[5];S.push({type:"C",values:[a,i,o,l,t,n]}),g=o,_=l,v=t,E=n}else if("L"===e.type){t=e.values[0],n=e.values[1];S.push({type:"L",values:[t,n]}),v=t,E=n}else if("H"===e.type){t=e.values[0];S.push({type:"L",values:[t,E]}),v=t}else if("V"===e.type){n=e.values[0];S.push({type:"L",values:[v,n]}),E=n}else if("S"===e.type){o=e.values[0],l=e.values[1],t=e.values[2],n=e.values[3];"C"===m||"S"===m?(r=v+(v-g),s=E+(E-_)):(r=v,s=E),S.push({type:"C",values:[r,s,o,l,t,n]}),g=o,_=l,v=t,E=n}else if("T"===e.type){t=e.values[0],n=e.values[1];"Q"===m||"T"===m?(a=v+(v-g),i=E+(E-_)):(a=v,i=E);var 
r=v+2*(a-v)/3,s=E+2*(i-E)/3,u=t+2*(a-t)/3,h=n+2*(i-n)/3;S.push({type:"C",values:[r,s,u,h,t,n]}),g=a,_=i,v=t,E=n}else if("Q"===e.type){a=e.values[0],i=e.values[1],t=e.values[2],n=e.values[3],r=v+2*(a-v)/3,s=E+2*(i-E)/3,u=t+2*(a-t)/3,h=n+2*(i-n)/3;S.push({type:"C",values:[r,s,u,h,t,n]}),g=a,_=i,v=t,E=n}else if("A"===e.type){var p=e.values[0],c=e.values[1],d=e.values[2],f=e.values[3],y=e.values[4];t=e.values[5],n=e.values[6];if(0===p||0===c)S.push({type:"C",values:[v,E,t,n,t,n]}),v=t,E=n;else if(v!==t||E!==n)U(v,E,t,n,p,c,d,f,y).forEach(function(e){S.push({type:"C",values:e}),v=t,E=n})}else"Z"===e.type&&(S.push(e),v=x,E=b);m=e.type}),S};e.SVGPathElement.prototype.setAttribute=function(e,t){"d"===e&&(this[d]=null,this[f]=null),n.call(this,e,t)},e.SVGPathElement.prototype.removeAttribute=function(e,t){"d"===e&&(this[d]=null,this[f]=null),r.call(this,e)},e.SVGPathElement.prototype.getPathData=function(e){if(e&&e.normalize){if(this[f])return y(this[f]);this[d]?n=y(this[d]):(n=a(this.getAttribute("d")||""),this[d]=y(n));var t=S((s=[],c=p=h=u=null,n.forEach(function(e){var t=e.type;if("M"===t){var n=e.values[0],a=e.values[1];s.push({type:"M",values:[n,a]}),u=p=n,h=c=a}else if("m"===t)n=u+e.values[0],a=h+e.values[1],s.push({type:"M",values:[n,a]}),u=p=n,h=c=a;else if("L"===t)n=e.values[0],a=e.values[1],s.push({type:"L",values:[n,a]}),u=n,h=a;else if("l"===t)n=u+e.values[0],a=h+e.values[1],s.push({type:"L",values:[n,a]}),u=n,h=a;else if("C"===t){var 
i=e.values[0],o=e.values[1],l=e.values[2],r=e.values[3];n=e.values[4],a=e.values[5],s.push({type:"C",values:[i,o,l,r,n,a]}),u=n,h=a}else"c"===t?(i=u+e.values[0],o=h+e.values[1],l=u+e.values[2],r=h+e.values[3],n=u+e.values[4],a=h+e.values[5],s.push({type:"C",values:[i,o,l,r,n,a]}),u=n,h=a):"Q"===t?(i=e.values[0],o=e.values[1],n=e.values[2],a=e.values[3],s.push({type:"Q",values:[i,o,n,a]}),u=n,h=a):"q"===t?(i=u+e.values[0],o=h+e.values[1],n=u+e.values[2],a=h+e.values[3],s.push({type:"Q",values:[i,o,n,a]}),u=n,h=a):"A"===t?(n=e.values[5],a=e.values[6],s.push({type:"A",values:[e.values[0],e.values[1],e.values[2],e.values[3],e.values[4],n,a]}),u=n,h=a):"a"===t?(n=u+e.values[5],a=h+e.values[6],s.push({type:"A",values:[e.values[0],e.values[1],e.values[2],e.values[3],e.values[4],n,a]}),u=n,h=a):"H"===t?(n=e.values[0],s.push({type:"H",values:[n]}),u=n):"h"===t?(n=u+e.values[0],s.push({type:"H",values:[n]}),u=n):"V"===t?(a=e.values[0],s.push({type:"V",values:[a]}),h=a):"v"===t?(a=h+e.values[0],s.push({type:"V",values:[a]}),h=a):"S"===t?(l=e.values[0],r=e.values[1],n=e.values[2],a=e.values[3],s.push({type:"S",values:[l,r,n,a]}),u=n,h=a):"s"===t?(l=u+e.values[0],r=h+e.values[1],n=u+e.values[2],a=h+e.values[3],s.push({type:"S",values:[l,r,n,a]}),u=n,h=a):"T"===t?(n=e.values[0],a=e.values[1],s.push({type:"T",values:[n,a]}),u=n,h=a):"t"===t?(n=u+e.values[0],a=h+e.values[1],s.push({type:"T",values:[n,a]}),u=n,h=a):"Z"!==t&&"z"!==t||(s.push({type:"Z",values:[]}),u=p,h=c)}),s));return this[f]=y(t),t}if(this[d])return y(this[d]);var s,u,h,p,c,n=a(this.getAttribute("d")||"");return this[d]=y(n),n},e.SVGPathElement.prototype.setPathData=function(e){if(0===e.length)l?this.setAttribute("d",""):this.removeAttribute("d");else{for(var t="",n=0,a=e.length;n<a;n+=1){var i=e[n];0<n&&(t+=" "),t+=i.type,i.values&&0<i.values.length&&(t+=" "+i.values.join(" "))}this.setAttribute("d",t)}},e.SVGRectElement.prototype.getPathData=function(e){var 
t=this.x.baseVal.value,n=this.y.baseVal.value,a=this.width.baseVal.value,i=this.height.baseVal.value,o=this.hasAttribute("rx")?this.rx.baseVal.value:this.ry.baseVal.value,l=this.hasAttribute("ry")?this.ry.baseVal.value:this.rx.baseVal.value;a/2<o&&(o=a/2),i/2<l&&(l=i/2);var r=[{type:"M",values:[t+o,n]},{type:"H",values:[t+a-o]},{type:"A",values:[o,l,0,0,1,t+a,n+l]},{type:"V",values:[n+i-l]},{type:"A",values:[o,l,0,0,1,t+a-o,n+i]},{type:"H",values:[t+o]},{type:"A",values:[o,l,0,0,1,t,n+i-l]},{type:"V",values:[n+l]},{type:"A",values:[o,l,0,0,1,t+o,n]},{type:"Z",values:[]}];return r=r.filter(function(e){return"A"!==e.type||0!==e.values[0]&&0!==e.values[1]}),e&&!0===e.normalize&&(r=S(r)),r},e.SVGCircleElement.prototype.getPathData=function(e){var t=this.cx.baseVal.value,n=this.cy.baseVal.value,a=this.r.baseVal.value,i=[{type:"M",values:[t+a,n]},{type:"A",values:[a,a,0,0,1,t,n+a]},{type:"A",values:[a,a,0,0,1,t-a,n]},{type:"A",values:[a,a,0,0,1,t,n-a]},{type:"A",values:[a,a,0,0,1,t+a,n]},{type:"Z",values:[]}];return e&&!0===e.normalize&&(i=S(i)),i},e.SVGEllipseElement.prototype.getPathData=function(e){var t=this.cx.baseVal.value,n=this.cy.baseVal.value,a=this.rx.baseVal.value,i=this.ry.baseVal.value,o=[{type:"M",values:[t+a,n]},{type:"A",values:[a,i,0,0,1,t,n+i]},{type:"A",values:[a,i,0,0,1,t-a,n]},{type:"A",values:[a,i,0,0,1,t,n-i]},{type:"A",values:[a,i,0,0,1,t+a,n]},{type:"Z",values:[]}];return e&&!0===e.normalize&&(o=S(o)),o},e.SVGLineElement.prototype.getPathData=function(){return[{type:"M",values:[this.x1.baseVal.value,this.y1.baseVal.value]},{type:"L",values:[this.x2.baseVal.value,this.y2.baseVal.value]}]},e.SVGPolylineElement.prototype.getPathData=function(){for(var e=[],t=0;t<this.points.numberOfItems;t+=1){var n=this.points.getItem(t);e.push({type:0===t?"M":"L",values:[n.x,n.y]})}return e},e.SVGPolygonElement.prototype.getPathData=function(){for(var e=[],t=0;t<this.points.numberOfItems;t+=1){var 
n=this.points.getItem(t);e.push({type:0===t?"M":"L",values:[n.x,n.y]})}return e.push({type:"Z",values:[]}),e}}()},O=function(n){var a={};function i(e){if(a[e])return a[e].exports;var t=a[e]={i:e,l:!1,exports:{}};return n[e].call(t.exports,t,t.exports,i),t.l=!0,t.exports}return i.m=n,i.c=a,i.d=function(e,t,n){i.o(e,t)||Object.defineProperty(e,t,{configurable:!1,enumerable:!0,get:n})},i.r=function(e){Object.defineProperty(e,"__esModule",{value:!0})},i.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return i.d(t,"a",t),t},i.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},i.p="",i(i.s=0)}([function(e,t,n){n.r(t);var a=500,i=[],o=window.requestAnimationFrame||window.mozRequestAnimationFrame||window.webkitRequestAnimationFrame||window.msRequestAnimationFrame||function(e){return setTimeout(e,1e3/60)},l=window.cancelAnimationFrame||window.mozCancelAnimationFrame||window.webkitCancelAnimationFrame||window.msCancelAnimationFrame||function(e){return clearTimeout(e)},r=void 0,s=Date.now();function u(){var t=void 0,e=void 0;r&&(l.call(window,r),r=null),i.forEach(function(e){e.event&&(e.listener(e.event),e.event=null,t=!0)}),t?(s=Date.now(),e=!0):Date.now()-s<a&&(e=!0),e&&(r=o.call(window,u))}function h(n){var a=-1;return i.some(function(e,t){return e.listener===n&&(a=t,!0)}),a}var p={add:function(e){var t=void 0;return-1===h(e)?(i.push(t={listener:e}),function(e){t.event=e,r||u()}):null},remove:function(e){var 
t;-1<(t=h(e))&&(i.splice(t,1),!i.length&&r&&(l.call(window,r),r=null))}};t.default=p}]).default,Y={line_altColor:{iniValue:!1},line_color:{},line_colorTra:{iniValue:!1},line_strokeWidth:{},plug_enabled:{iniValue:!1},plug_enabledSE:{hasSE:!0,iniValue:!1},plug_plugSE:{hasSE:!0,iniValue:ne},plug_colorSE:{hasSE:!0},plug_colorTraSE:{hasSE:!0,iniValue:!1},plug_markerWidthSE:{hasSE:!0},plug_markerHeightSE:{hasSE:!0},lineOutline_enabled:{iniValue:!1},lineOutline_color:{},lineOutline_colorTra:{iniValue:!1},lineOutline_strokeWidth:{},lineOutline_inStrokeWidth:{},plugOutline_enabledSE:{hasSE:!0,iniValue:!1},plugOutline_plugSE:{hasSE:!0,iniValue:ne},plugOutline_colorSE:{hasSE:!0},plugOutline_colorTraSE:{hasSE:!0,iniValue:!1},plugOutline_strokeWidthSE:{hasSE:!0},plugOutline_inStrokeWidthSE:{hasSE:!0},position_socketXYSE:{hasSE:!0,hasProps:!0},position_plugOverheadSE:{hasSE:!0},position_path:{},position_lineStrokeWidth:{},position_socketGravitySE:{hasSE:!0},path_pathData:{},path_edge:{hasProps:!0},viewBox_bBox:{hasProps:!0},viewBox_plugBCircleSE:{hasSE:!0},lineMask_enabled:{iniValue:!1},lineMask_outlineMode:{iniValue:!1},lineMask_x:{},lineMask_y:{},lineOutlineMask_x:{},lineOutlineMask_y:{},maskBGRect_x:{},maskBGRect_y:{},capsMaskAnchor_enabledSE:{hasSE:!0,iniValue:!1},capsMaskAnchor_pathDataSE:{hasSE:!0},capsMaskAnchor_strokeWidthSE:{hasSE:!0},capsMaskMarker_enabled:{iniValue:!1},capsMaskMarker_enabledSE:{hasSE:!0,iniValue:!1},capsMaskMarker_plugSE:{hasSE:!0,iniValue:ne},capsMaskMarker_markerWidthSE:{hasSE:!0},capsMaskMarker_markerHeightSE:{hasSE:!0},caps_enabled:{iniValue:!1},attach_plugSideLenSE:{hasSE:!0},attach_plugBackLenSE:{hasSE:!0}},X={show_on:{},show_effect:{},show_animOptions:{},show_animId:{},show_inAnim:{}},q="fade",Q=[],K={},J=0,$={},ee=0;function ce(t,n){var e,a;return typeof t!=typeof n||(e=k(t)?"obj":Array.isArray(t)?"array":"")!=(k(n)?"obj":Array.isArray(n)?"array":"")||("obj"===e?ce(a=Object.keys(t).sort(),Object.keys(n).sort())||a.some(function(e){return 
ce(t[e],n[e])}):"array"===e?t.length!==n.length||t.some(function(e,t){return ce(e,n[t])}):t!==n)}function de(n){return n?k(n)?Object.keys(n).reduce(function(e,t){return e[t]=de(n[t]),e},{}):Array.isArray(n)?n.map(de):n:n}function fe(e){var t,n,a,i=1,o=e=(e+"").trim();function l(e){var t=1,n=u.exec(e);return n&&(t=parseFloat(n[1]),n[2]?t=0<=t&&t<=100?t/100:1:(t<0||1<t)&&(t=1)),t}return(t=/^(rgba|hsla|hwb|gray|device\-cmyk)\s*\(([\s\S]+)\)$/i.exec(e))?(n=t[1].toLowerCase(),a=t[2].trim().split(/\s*,\s*/),"rgba"===n&&4===a.length?(i=l(a[3]),o="rgb("+a.slice(0,3).join(", ")+")"):"hsla"===n&&4===a.length?(i=l(a[3]),o="hsl("+a.slice(0,3).join(", ")+")"):"hwb"===n&&4===a.length?(i=l(a[3]),o="hwb("+a.slice(0,3).join(", ")+")"):"gray"===n&&2===a.length?(i=l(a[1]),o="gray("+a[0]+")"):"device-cmyk"===n&&5<=a.length&&(i=l(a[4]),o="device-cmyk("+a.slice(0,4).join(", ")+")")):(t=/^\#(?:([\da-f]{6})([\da-f]{2})|([\da-f]{3})([\da-f]))$/i.exec(e))?t[1]?(i=parseInt(t[2],16)/255,o="#"+t[1]):(i=parseInt(t[4]+t[4],16)/255,o="#"+t[3]):"transparent"===e.toLocaleLowerCase()&&(i=0),[i,o]}function ye(e){return!(!e||e.nodeType!==Node.ELEMENT_NODE||"function"!=typeof e.getBoundingClientRect)}function Se(e,t){var n,a,i,o,l={};if(!(i=e.ownerDocument))return console.error("Cannot get document that contains the element."),null;if(e.compareDocumentPosition(i)&Node.DOCUMENT_POSITION_DISCONNECTED)return console.error("A disconnected element was passed."),null;for(a in n=e.getBoundingClientRect())l[a]=n[a];if(!t){if(!(o=i.defaultView))return console.error("Cannot get window that contains the element."),null;l.left+=o.pageXOffset,l.right+=o.pageXOffset,l.top+=o.pageYOffset,l.bottom+=o.pageYOffset}return l}function me(e,t){var n,a,i=[],o=e;for(t=t||window;;){if(!(n=o.ownerDocument))return console.error("Cannot get document that contains the element."),null;if(!(a=n.defaultView))return console.error("Cannot get window that contains the element."),null;if(a===t)break;if(!(o=a.frameElement))return 
console.error("`baseWindow` was not found."),null;i.unshift(o)}return i}function ge(e,t){var n,a,o=0,l=0;return(a=me(e,t=t||window))?a.length?(a.forEach(function(e,t){var n,a,i=Se(e,0<t);o+=i.left,l+=i.top,a=(n=e).ownerDocument.defaultView.getComputedStyle(n,""),i={left:n.clientLeft+parseFloat(a.paddingLeft),top:n.clientTop+parseFloat(a.paddingTop)},o+=i.left,l+=i.top}),(n=Se(e,!0)).left+=o,n.right+=o,n.top+=l,n.bottom+=l,n):Se(e):null}function _e(e,t){var n=e.x-t.x,a=e.y-t.y;return Math.sqrt(n*n+a*a)}function ve(e,t,n){var a=t.x-e.x,i=t.y-e.y;return{x:e.x+a*n,y:e.y+i*n,angle:Math.atan2(i,a)/(Math.PI/180)}}function Ee(e,t,n){var a=Math.atan2(e.y-t.y,t.x-e.x);return{x:t.x+Math.cos(a)*n,y:t.y+Math.sin(a)*n*-1}}function xe(e,t,n,a,i){var o=i*i,l=o*i,r=1-i,s=r*r,u=s*r,h=u*e.x+3*s*i*t.x+3*r*o*n.x+l*a.x,p=u*e.y+3*s*i*t.y+3*r*o*n.y+l*a.y,c=e.x+2*i*(t.x-e.x)+o*(n.x-2*t.x+e.x),d=e.y+2*i*(t.y-e.y)+o*(n.y-2*t.y+e.y),f=t.x+2*i*(n.x-t.x)+o*(a.x-2*n.x+t.x),y=t.y+2*i*(n.y-t.y)+o*(a.y-2*n.y+t.y),S=r*e.x+i*t.x,m=r*e.y+i*t.y,g=r*n.x+i*a.x,_=r*n.y+i*a.y,v=90-180*Math.atan2(c-f,d-y)/Math.PI;return{x:h,y:p,fromP2:{x:c,y:d},toP1:{x:f,y:y},fromP1:{x:S,y:m},toP2:{x:g,y:_},angle:v+=180<v?-180:180}}function be(n,a,i,o,e){function l(e,t,n,a,i){return e*(e*(-3*t+9*n-9*a+3*i)+6*t-12*n+6*a)-3*t+3*n}var r,s,u,h,p,c=[.2491,.2491,.2335,.2335,.2032,.2032,.1601,.1601,.1069,.1069,.0472,.0472],d=0;return r=(e=null==e||1<e?1:e<0?0:e)/2,[-.1252,.1252,-.3678,.3678,-.5873,.5873,-.7699,.7699,-.9041,.9041,-.9816,.9816].forEach(function(e,t){u=l(s=r*e+r,n.x,a.x,i.x,o.x),h=l(s,n.y,a.y,i.y,o.y),p=u*u+h*h,d+=c[t]*Math.sqrt(p)}),r*d}function ke(e,t,n,a,i){for(var o,l=.5,r=1-l;o=be(e,t,n,a,r),!(Math.abs(o-i)<=.01);)r+=(o<i?1:-1)*(l/=2);return r}function we(e,n){var a;return e.forEach(function(e){var t=n?e.map(function(e){var t={x:e.x,y:e.y};return 
n(t),t}):e;a||(a=[{type:"M",values:[t[0].x,t[0].y]}]),a.push(t.length?2===t.length?{type:"L",values:[t[1].x,t[1].y]}:{type:"C",values:[t[1].x,t[1].y,t[2].x,t[2].y,t[3].x,t[3].y]}:{type:"Z",values:[]})}),a}function Oe(e){var n=[],a=0;return e.forEach(function(e){var t=(2===e.length?_e:be).apply(null,e);n.push(t),a+=t}),{segsLen:n,lenAll:a}}function Me(e,a){return null==e||null==a||e.length!==a.length||e.some(function(e,t){var n=a[t];return e.type!==n.type||e.values.some(function(e,t){return e!==n.values[t]})})}function Ie(e,t,n){e.events[t]?e.events[t].indexOf(n)<0&&e.events[t].push(n):e.events[t]=[n]}function Ce(e,t,n){var a;e.events[t]&&-1<(a=e.events[t].indexOf(n))&&e.events[t].splice(a,1)}function Le(e){t&&clearTimeout(t),Q.push(e),t=setTimeout(function(){Q.forEach(function(e){e()}),Q=[]},0)}function Ae(e,t){e.reflowTargets.indexOf(t)<0&&e.reflowTargets.push(t)}function Ve(e){e.reflowTargets.forEach(function(e){var n;n=e,setTimeout(function(){var e=n.parentNode,t=n.nextSibling;e.insertBefore(e.removeChild(n),t)},0)}),e.reflowTargets=[]}function Pe(e,t,n,a,i,o,l){var r,s,u;"auto-start-reverse"===n?("boolean"!=typeof h&&(t.setAttribute("orient","auto-start-reverse"),h=t.orientType.baseVal===SVGMarkerElement.SVG_MARKER_ORIENT_UNKNOWN),h?t.setAttribute("orient",n):((r=i.createSVGTransform()).setRotate(180,0,0),o.transform.baseVal.appendItem(r),t.setAttribute("orient","auto"),u=!0)):(t.setAttribute("orient",n),!1===h&&o.transform.baseVal.clear()),s=t.viewBox.baseVal,u?(s.x=-a.right,s.y=-a.bottom):(s.x=a.left,s.y=a.top),s.width=a.width,s.height=a.height,le&&Ae(e,l)}function Ne(e,t){return{prop:e?"markerEnd":"markerStart",orient:t?t.noRotate?"0":e?"auto":"auto-start-reverse":null}}function Te(n,a){Object.keys(a).forEach(function(e){var t=a[e];n[e]=null!=t.iniValue?t.hasSE?[t.iniValue,t.iniValue]:t.iniValue:t.hasSE?t.hasProps?[{},{}]:[]:t.hasProps?{}:null})}function We(t,e,n,a,i){return a!==e[n]&&(e[n]=a,i&&i.forEach(function(e){e(t,a,n)}),!0)}function Be(e){function 
t(e,t){return e+parseFloat(t)}var n=e.document,a=e.getComputedStyle(n.documentElement,""),i=e.getComputedStyle(n.body,""),o={x:0,y:0};return"static"!==i.position?(o.x-=[a.marginLeft,a.borderLeftWidth,a.paddingLeft,i.marginLeft,i.borderLeftWidth].reduce(t,0),o.y-=[a.marginTop,a.borderTopWidth,a.paddingTop,i.marginTop,i.borderTopWidth].reduce(t,0)):"static"!==a.position&&(o.x-=[a.marginLeft,a.borderLeftWidth].reduce(t,0),o.y-=[a.marginTop,a.borderTopWidth].reduce(t,0)),o}function Re(e){var t,n=e.document;n.getElementById(r)||(t=(new e.DOMParser).parseFromString(s,"image/svg+xml"),n.body.appendChild(t.documentElement),d(e))}function Fe(u){var _,f,v,e,n,a,i,y,s,h,p,t,o,l,r,c,d,S,m,g=u.options,E=u.curStats,x=u.aplStats,b=E.position_socketXYSE,k=!1;function w(e,t){var n=t===M?{x:e.left+e.width/2,y:e.top}:t===I?{x:e.right,y:e.top+e.height/2}:t===C?{x:e.left+e.width/2,y:e.bottom}:{x:e.left,y:e.top+e.height/2};return n.socketId=t,n}function O(e){return{x:e.x,y:e.y}}if(E.position_path=g.path,E.position_lineStrokeWidth=E.line_strokeWidth,E.position_socketGravitySE=_=de(g.socketGravitySE),f=[0,1].map(function(e){var t,n,a,i=g.anchorSE[e],o=u.optionIsAttach.anchorSE[e],l=!1!==o?$[i._id]:null,r=!1!==o&&l.conf.getStrokeWidth?l.conf.getStrokeWidth(l,u):0,s=!1!==o&&l.conf.getBBoxNest?l.conf.getBBoxNest(l,u,r):ge(i,u.baseWindow);return E.capsMaskAnchor_pathDataSE[e]=!1!==o&&l.conf.getPathData?l.conf.getPathData(l,u,r):(n=null!=(t=s).right?t.right:t.left+t.width,a=null!=t.bottom?t.bottom:t.top+t.height,[{type:"M",values:[t.left,t.top]},{type:"L",values:[n,t.top]},{type:"L",values:[n,a]},{type:"L",values:[t.left,a]},{type:"Z",values:[]}]),E.capsMaskAnchor_strokeWidthSE[e]=r,s}),i=-1,g.socketSE[0]&&g.socketSE[1]?(b[0]=w(f[0],g.socketSE[0]),b[1]=w(f[1],g.socketSE[1])):(g.socketSE[0]||g.socketSE[1]?(g.socketSE[0]?(n=0,a=1):(n=1,a=0),b[n]=w(f[n],g.socketSE[n]),(e=W.map(function(e){return w(f[a],e)})).forEach(function(e){var 
t=_e(e,b[n]);(t<i||-1===i)&&(b[a]=e,i=t)})):(e=W.map(function(e){return w(f[1],e)}),W.map(function(e){return w(f[0],e)}).forEach(function(n){e.forEach(function(e){var t=_e(n,e);(t<i||-1===i)&&(b[0]=n,b[1]=e,i=t)})})),[0,1].forEach(function(e){var t,n;g.socketSE[e]||(f[e].width||f[e].height?f[e].width||b[e].socketId!==L&&b[e].socketId!==I?f[e].height||b[e].socketId!==M&&b[e].socketId!==C||(b[e].socketId=0<=b[e?0:1].y-f[e].top?C:M):b[e].socketId=0<=b[e?0:1].x-f[e].left?I:L:(t=b[e?0:1].x-f[e].left,n=b[e?0:1].y-f[e].top,b[e].socketId=Math.abs(t)>=Math.abs(n)?0<=t?I:L:0<=n?C:M))})),E.position_path!==x.position_path||E.position_lineStrokeWidth!==x.position_lineStrokeWidth||[0,1].some(function(e){return E.position_plugOverheadSE[e]!==x.position_plugOverheadSE[e]||(i=b[e],o=x.position_socketXYSE[e],i.x!==o.x||i.y!==o.y||i.socketId!==o.socketId)||(t=_[e],n=x.position_socketGravitySE[e],(a=null==t?"auto":Array.isArray(t)?"array":"number")!==(null==n?"auto":Array.isArray(n)?"array":"number")||("array"===a?t[0]!==n[0]||t[1]!==n[1]:t!==n));var t,n,a,i,o})){switch(u.pathList.baseVal=v=[],u.pathList.animVal=null,E.position_path){case A:v.push([O(b[0]),O(b[1])]);break;case V:t="number"==typeof _[0]&&0<_[0]||"number"==typeof _[1]&&0<_[1],o=Z*(t?-1:1),l=Math.atan2(b[1].y-b[0].y,b[1].x-b[0].x),r=-l+o,c=Math.PI-l-o,d=_e(b[0],b[1])/Math.sqrt(2)*U,S={x:b[0].x+Math.cos(r)*d,y:b[0].y+Math.sin(r)*d*-1},m={x:b[1].x+Math.cos(c)*d,y:b[1].y+Math.sin(c)*d*-1},v.push([O(b[0]),S,m,O(b[1])]);break;case P:case N:s=[_[0],E.position_path===N?0:_[1]],h=[],p=[],b.forEach(function(e,t){var n,a,i,o,l,r=s[t];Array.isArray(r)?n={x:r[0],y:r[1]}:"number"==typeof 
r?n=e.socketId===M?{x:0,y:-r}:e.socketId===I?{x:r,y:0}:e.socketId===C?{x:0,y:r}:{x:-r,y:0}:(a=b[t?0:1],o=0<(i=E.position_plugOverheadSE[t])?G+(D<i?(i-D)*z:0):B+(E.position_lineStrokeWidth>R?(E.position_lineStrokeWidth-R)*F:0),e.socketId===M?((l=(e.y-a.y)/2)<o&&(l=o),n={x:0,y:-l}):e.socketId===I?((l=(a.x-e.x)/2)<o&&(l=o),n={x:l,y:0}):e.socketId===C?((l=(a.y-e.y)/2)<o&&(l=o),n={x:0,y:l}):((l=(e.x-a.x)/2)<o&&(l=o),n={x:-l,y:0})),h[t]=e.x+n.x,p[t]=e.y+n.y}),v.push([O(b[0]),{x:h[0],y:p[0]},{x:h[1],y:p[1]},O(b[1])]);break;case T:!function(){var a,o=1,l=2,r=3,s=4,u=[[],[]],h=[];function p(e){return e===o?r:e===l?s:e===r?o:l}function c(e){return e===l||e===s?"x":"y"}function d(e,t,n){var a={x:e.x,y:e.y};if(n){if(n===p(e.dirId))throw new Error("Invalid dirId: "+n);a.dirId=n}else a.dirId=e.dirId;return a.dirId===o?a.y-=t:a.dirId===l?a.x+=t:a.dirId===r?a.y+=t:a.x-=t,a}function f(e,t){return t.dirId===o?e.y<=t.y:t.dirId===l?e.x>=t.x:t.dirId===r?e.y>=t.y:e.x<=t.x}function y(e,t){return t.dirId===o||t.dirId===r?e.x===t.x:e.y===t.y}function S(e){return e[0]?{contain:0,notContain:1}:{contain:1,notContain:0}}function m(e,t,n){return Math.abs(t[n]-e[n])}function g(e,t,n){return"x"===n?e.x<t.x?l:s:e.y<t.y?r:o}function e(){var e,t,a,i,n=[f(h[1],h[0]),f(h[0],h[1])],o=[c(h[0].dirId),c(h[1].dirId)];if(o[0]===o[1]){if(n[0]&&n[1])return y(h[1],h[0])||(h[0][o[0]]===h[1][o[1]]?(u[0].push(h[0]),u[1].push(h[1])):(e=h[0][o[0]]+(h[1][o[1]]-h[0][o[0]])/2,u[0].push(d(h[0],Math.abs(e-h[0][o[0]]))),u[1].push(d(h[1],Math.abs(e-h[1][o[1]]))))),!1;n[0]!==n[1]?(t=S(n),(a=m(h[t.notContain],h[t.contain],o[t.notContain]))<H&&(h[t.notContain]=d(h[t.notContain],H-a)),u[t.notContain].push(h[t.notContain]),h[t.notContain]=d(h[t.notContain],H,y(h[t.contain],h[t.notContain])?"x"===o[t.notContain]?r:l:g(h[t.notContain],h[t.contain],"x"===o[t.notContain]?"y":"x"))):(a=m(h[0],h[1],"x"===o[0]?"y":"x"),u.forEach(function(e,t){var 
n=0===t?1:0;e.push(h[t]),h[t]=d(h[t],H,2*H<=a?g(h[t],h[n],"x"===o[t]?"y":"x"):"x"===o[t]?r:l)}))}else{if(n[0]&&n[1])return y(h[1],h[0])?u[1].push(h[1]):y(h[0],h[1])?u[0].push(h[0]):u[0].push("x"===o[0]?{x:h[1].x,y:h[0].y}:{x:h[0].x,y:h[1].y}),!1;n[0]!==n[1]?(t=S(n),u[t.notContain].push(h[t.notContain]),h[t.notContain]=d(h[t.notContain],H,m(h[t.notContain],h[t.contain],o[t.contain])>=H?g(h[t.notContain],h[t.contain],o[t.contain]):h[t.contain].dirId)):(i=[{x:h[0].x,y:h[0].y},{x:h[1].x,y:h[1].y}],u.forEach(function(e,t){var n=0===t?1:0,a=m(i[t],i[n],o[t]);a<H&&(h[t]=d(h[t],H-a)),e.push(h[t]),h[t]=d(h[t],H,g(h[t],h[n],o[n]))}))}return!0}for(b.forEach(function(e,t){var n,a=O(e),i=_[t];n=Array.isArray(i)?i[0]<0?[s,-i[0]]:0<i[0]?[l,i[0]]:i[1]<0?[o,-i[1]]:0<i[1]?[r,i[1]]:[e.socketId,0]:"number"!=typeof i?[e.socketId,H]:0<=i?[e.socketId,i]:[p(e.socketId),-i],a.dirId=n[0],i=n[1],u[t].push(a),h[t]=d(a,i)});e(););u[1].reverse(),u[0].concat(u[1]).forEach(function(e,t){var n={x:e.x,y:e.y};0<t&&v.push([a,n]),a=n})}()}y=[],E.position_plugOverheadSE.forEach(function(e,t){var 
n,a,i,o,l,r,s,u,h,p,c,d=!t;0<e?2===(n=v[a=d?0:v.length-1]).length?(y[a]=y[a]||_e.apply(null,n),y[a]>j&&(y[a]-e<j&&(e=y[a]-j),i=ve(n[0],n[1],(d?e:y[a]-e)/y[a]),v[a]=d?[i,n[1]]:[n[0],i],y[a]-=e)):(y[a]=y[a]||be.apply(null,n),y[a]>j&&(y[a]-e<j&&(e=y[a]-j),i=xe(n[0],n[1],n[2],n[3],ke(n[0],n[1],n[2],n[3],d?e:y[a]-e)),d?(o=n[0],l=i.toP1):(o=n[3],l=i.fromP2),r=Math.atan2(o.y-i.y,i.x-o.x),s=_e(i,l),i.x=o.x+Math.cos(r)*e,i.y=o.y+Math.sin(r)*e*-1,l.x=i.x+Math.cos(r)*s,l.y=i.y+Math.sin(r)*s*-1,v[a]=d?[i,i.toP1,i.toP2,n[3]]:[n[0],i.fromP1,i.fromP2,i],y[a]=null)):e<0&&(n=v[a=d?0:v.length-1],u=b[t].socketId,h=u===L||u===I?"x":"y",e<(c=-f[t]["x"===h?"width":"height"])&&(e=c),p=e*(u===L||u===M?-1:1),2===n.length?n[d?0:n.length-1][h]+=p:(d?[0,1]:[n.length-2,n.length-1]).forEach(function(e){n[e][h]+=p}),y[a]=null)}),x.position_socketXYSE=de(b),x.position_plugOverheadSE=de(E.position_plugOverheadSE),x.position_path=E.position_path,x.position_lineStrokeWidth=E.position_lineStrokeWidth,x.position_socketGravitySE=de(_),k=!0,u.events.apl_position&&u.events.apl_position.forEach(function(e){e(u,v)})}return k}function Ge(t,n){n!==t.isShown&&(!!n!=!!t.isShown&&(t.svg.style.visibility=n?"":"hidden"),t.isShown=n,t.events&&t.events.svgShow&&t.events.svgShow.forEach(function(e){e(t,n)}))}function De(e,t){var n,a,i,o,l,h,p,c,d,f,r,s,u,y,S,m,g,_,v,E,x,b,k,w,O,M,I,C,L,A,V,P,N,T,W,B,R,F,G,D,z,j,H,U,Z,Y,X,q,Q,K,J,$,ee={};t.line&&(ee.line=(a=(n=e).options,i=n.curStats,o=n.events,l=!1,l=We(n,i,"line_color",a.lineColor,o.cur_line_color)||l,l=We(n,i,"line_colorTra",fe(i.line_color)[0]<1)||l,l=We(n,i,"line_strokeWidth",a.lineSize,o.cur_line_strokeWidth)||l)),(t.plug||ee.line)&&(ee.plug=(p=(h=e).options,c=h.curStats,d=h.events,f=!1,[0,1].forEach(function(e){var 
t,n,a,i,o,l,r,s,u=p.plugSE[e];f=We(h,c.plug_enabledSE,e,u!==ne)||f,f=We(h,c.plug_plugSE,e,u)||f,f=We(h,c.plug_colorSE,e,s=p.plugColorSE[e]||c.line_color,d.cur_plug_colorSE)||f,f=We(h,c.plug_colorTraSE,e,fe(s)[0]<1)||f,u!==ne&&(i=n=(t=ae[ie[u]]).widthR*p.plugSizeSE[e],o=a=t.heightR*p.plugSizeSE[e],ue&&(i*=c.line_strokeWidth,o*=c.line_strokeWidth),f=We(h,c.plug_markerWidthSE,e,i)||f,f=We(h,c.plug_markerHeightSE,e,o)||f,c.capsMaskMarker_markerWidthSE[e]=n,c.capsMaskMarker_markerHeightSE[e]=a),c.plugOutline_plugSE[e]=c.capsMaskMarker_plugSE[e]=u,c.plug_enabledSE[e]?(s=c.line_strokeWidth/pe.lineSize*p.plugSizeSE[e],c.position_plugOverheadSE[e]=t.overhead*s,c.viewBox_plugBCircleSE[e]=t.bCircle*s,l=t.sideLen*s,r=t.backLen*s):(c.position_plugOverheadSE[e]=-c.line_strokeWidth/2,c.viewBox_plugBCircleSE[e]=l=r=0),We(h,c.attach_plugSideLenSE,e,l,d.cur_attach_plugSideLenSE),We(h,c.attach_plugBackLenSE,e,r,d.cur_attach_plugBackLenSE),c.capsMaskAnchor_enabledSE[e]=!c.plug_enabledSE[e]}),f=We(h,c,"plug_enabled",c.plug_enabledSE[0]||c.plug_enabledSE[1])||f)),(t.lineOutline||ee.line)&&(ee.lineOutline=(u=(r=e).options,y=r.curStats,S=!1,S=We(r,y,"lineOutline_enabled",u.lineOutlineEnabled)||S,S=We(r,y,"lineOutline_color",u.lineOutlineColor)||S,S=We(r,y,"lineOutline_colorTra",fe(y.lineOutline_color)[0]<1)||S,s=y.line_strokeWidth*u.lineOutlineSize,S=We(r,y,"lineOutline_strokeWidth",y.line_strokeWidth-2*s)||S,S=We(r,y,"lineOutline_inStrokeWidth",y.lineOutline_colorTra?y.lineOutline_strokeWidth+2*he:y.line_strokeWidth-s)||S)),(t.plugOutline||ee.line||ee.plug||ee.lineOutline)&&(ee.plugOutline=(g=(m=e).options,_=m.curStats,v=!1,[0,1].forEach(function(e){var 
t,n=_.plugOutline_plugSE[e],a=n!==ne?ae[ie[n]]:null;v=We(m,_.plugOutline_enabledSE,e,g.plugOutlineEnabledSE[e]&&_.plug_enabled&&_.plug_enabledSE[e]&&!!a&&!!a.outlineBase)||v,v=We(m,_.plugOutline_colorSE,e,t=g.plugOutlineColorSE[e]||_.lineOutline_color)||v,v=We(m,_.plugOutline_colorTraSE,e,fe(t)[0]<1)||v,a&&a.outlineBase&&((t=g.plugOutlineSizeSE[e])>a.outlineMax&&(t=a.outlineMax),t*=2*a.outlineBase,v=We(m,_.plugOutline_strokeWidthSE,e,t)||v,v=We(m,_.plugOutline_inStrokeWidthSE,e,_.plugOutline_colorTraSE[e]?t-he/(_.line_strokeWidth/pe.lineSize)/g.plugSizeSE[e]*2:t/2)||v)}),v)),(t.faces||ee.line||ee.plug||ee.lineOutline||ee.plugOutline)&&(ee.faces=(b=(E=e).curStats,k=E.aplStats,w=E.events,O=!1,!b.line_altColor&&We(E,k,"line_color",x=b.line_color,w.apl_line_color)&&(E.lineFace.style.stroke=x,O=!0),We(E,k,"line_strokeWidth",x=b.line_strokeWidth,w.apl_line_strokeWidth)&&(E.lineShape.style.strokeWidth=x+"px",O=!0,(re||le)&&(Ae(E,E.lineShape),le&&(Ae(E,E.lineFace),Ae(E,E.lineMaskCaps)))),We(E,k,"lineOutline_enabled",x=b.lineOutline_enabled,w.apl_lineOutline_enabled)&&(E.lineOutlineFace.style.display=x?"inline":"none",O=!0),b.lineOutline_enabled&&(We(E,k,"lineOutline_color",x=b.lineOutline_color,w.apl_lineOutline_color)&&(E.lineOutlineFace.style.stroke=x,O=!0),We(E,k,"lineOutline_strokeWidth",x=b.lineOutline_strokeWidth,w.apl_lineOutline_strokeWidth)&&(E.lineOutlineMaskShape.style.strokeWidth=x+"px",O=!0,le&&(Ae(E,E.lineOutlineMaskCaps),Ae(E,E.lineOutlineFace))),We(E,k,"lineOutline_inStrokeWidth",x=b.lineOutline_inStrokeWidth,w.apl_lineOutline_inStrokeWidth)&&(E.lineMaskShape.style.strokeWidth=x+"px",O=!0,le&&(Ae(E,E.lineOutlineMaskCaps),Ae(E,E.lineOutlineFace)))),We(E,k,"plug_enabled",x=b.plug_enabled,w.apl_plug_enabled)&&(E.plugsFace.style.display=x?"inline":"none",O=!0),b.plug_enabled&&[0,1].forEach(function(n){var 
e=b.plug_plugSE[n],t=e!==ne?ae[ie[e]]:null,a=Ne(n,t);We(E,k.plug_enabledSE,n,x=b.plug_enabledSE[n],w.apl_plug_enabledSE)&&(E.plugsFace.style[a.prop]=x?"url(#"+E.plugMarkerIdSE[n]+")":"none",O=!0),b.plug_enabledSE[n]&&(We(E,k.plug_plugSE,n,e,w.apl_plug_plugSE)&&(E.plugFaceSE[n].href.baseVal="#"+t.elmId,Pe(E,E.plugMarkerSE[n],a.orient,t.bBox,E.svg,E.plugMarkerShapeSE[n],E.plugsFace),O=!0,re&&Ae(E,E.plugsFace)),We(E,k.plug_colorSE,n,x=b.plug_colorSE[n],w.apl_plug_colorSE)&&(E.plugFaceSE[n].style.fill=x,O=!0,(se||ue||le)&&!b.line_colorTra&&Ae(E,le?E.lineMaskCaps:E.capsMaskLine)),["markerWidth","markerHeight"].forEach(function(e){var t="plug_"+e+"SE";We(E,k[t],n,x=b[t][n],w["apl_"+t])&&(E.plugMarkerSE[n][e].baseVal.value=x,O=!0)}),We(E,k.plugOutline_enabledSE,n,x=b.plugOutline_enabledSE[n],w.apl_plugOutline_enabledSE)&&(x?(E.plugFaceSE[n].style.mask="url(#"+E.plugMaskIdSE[n]+")",E.plugOutlineFaceSE[n].style.display="inline"):(E.plugFaceSE[n].style.mask="none",E.plugOutlineFaceSE[n].style.display="none"),O=!0),b.plugOutline_enabledSE[n]&&(We(E,k.plugOutline_plugSE,n,e,w.apl_plugOutline_plugSE)&&(E.plugOutlineFaceSE[n].href.baseVal=E.plugMaskShapeSE[n].href.baseVal=E.plugOutlineMaskShapeSE[n].href.baseVal="#"+t.elmId,[E.plugMaskSE[n],E.plugOutlineMaskSE[n]].forEach(function(e){e.x.baseVal.value=t.bBox.left,e.y.baseVal.value=t.bBox.top,e.width.baseVal.value=t.bBox.width,e.height.baseVal.value=t.bBox.height}),O=!0),We(E,k.plugOutline_colorSE,n,x=b.plugOutline_colorSE[n],w.apl_plugOutline_colorSE)&&(E.plugOutlineFaceSE[n].style.fill=x,O=!0,le&&(Ae(E,E.lineMaskCaps),Ae(E,E.lineOutlineMaskCaps))),We(E,k.plugOutline_strokeWidthSE,n,x=b.plugOutline_strokeWidthSE[n],w.apl_plugOutline_strokeWidthSE)&&(E.plugOutlineMaskShapeSE[n].style.strokeWidth=x+"px",O=!0),We(E,k.plugOutline_inStrokeWidthSE,n,x=b.plugOutline_inStrokeWidthSE[n],w.apl_plugOutline_inStrokeWidthSE)&&(E.plugMaskShapeSE[n].style.strokeWidth=x+"px",O=!0)))}),O)),(t.position||ee.line||ee.plug)&&(ee.position=Fe(e)),(t.pa
th||ee.position)&&(ee.path=(C=(M=e).curStats,L=M.aplStats,A=M.pathList.animVal||M.pathList.baseVal,V=C.path_edge,P=!1,A&&(V.x1=V.x2=A[0][0].x,V.y1=V.y2=A[0][0].y,C.path_pathData=I=we(A,function(e){e.x<V.x1&&(V.x1=e.x),e.y<V.y1&&(V.y1=e.y),e.x>V.x2&&(V.x2=e.x),e.y>V.y2&&(V.y2=e.y)}),Me(I,L.path_pathData)&&(M.linePath.setPathData(I),L.path_pathData=I,P=!0,le?(Ae(M,M.plugsFace),Ae(M,M.lineMaskCaps)):re&&Ae(M,M.linePath),M.events.apl_path&&M.events.apl_path.forEach(function(e){e(M,I)}))),P)),ee.viewBox=(B=(N=e).curStats,R=N.aplStats,F=B.path_edge,G=B.viewBox_bBox,D=R.viewBox_bBox,z=N.svg.viewBox.baseVal,j=N.svg.style,H=!1,T=Math.max(B.line_strokeWidth/2,B.viewBox_plugBCircleSE[0]||0,B.viewBox_plugBCircleSE[1]||0),W={x1:F.x1-T,y1:F.y1-T,x2:F.x2+T,y2:F.y2+T},N.events.new_edge4viewBox&&N.events.new_edge4viewBox.forEach(function(e){e(N,W)}),G.x=B.lineMask_x=B.lineOutlineMask_x=B.maskBGRect_x=W.x1,G.y=B.lineMask_y=B.lineOutlineMask_y=B.maskBGRect_y=W.y1,G.width=W.x2-W.x1,G.height=W.y2-W.y1,["x","y","width","height"].forEach(function(e){var t;(t=G[e])!==D[e]&&(z[e]=D[e]=t,j[oe[e]]=t+("x"===e||"y"===e?N.bodyOffset[e]:0)+"px",H=!0)}),H),ee.mask=(Y=(U=e).curStats,X=U.aplStats,q=!1,Y.plug_enabled?[0,1].forEach(function(e){Y.capsMaskMarker_enabledSE[e]=Y.plug_enabledSE[e]&&Y.plug_colorTraSE[e]||Y.plugOutline_enabledSE[e]&&Y.plugOutline_colorTraSE[e]}):Y.capsMaskMarker_enabledSE[0]=Y.capsMaskMarker_enabledSE[1]=!1,Y.capsMaskMarker_enabled=Y.capsMaskMarker_enabledSE[0]||Y.capsMaskMarker_enabledSE[1],Y.lineMask_outlineMode=Y.lineOutline_enabled,Y.caps_enabled=Y.capsMaskMarker_enabled||Y.capsMaskAnchor_enabledSE[0]||Y.capsMaskAnchor_enabledSE[1],Y.lineMask_enabled=Y.caps_enabled||Y.lineMask_outlineMode,(Y.lineMask_enabled&&!Y.lineMask_outlineMode||Y.lineOutline_enabled)&&["x","y"].forEach(function(e){var 
t="maskBGRect_"+e;We(U,X,t,Z=Y[t])&&(U.maskBGRect[e].baseVal.value=Z,q=!0)}),We(U,X,"lineMask_enabled",Z=Y.lineMask_enabled)&&(U.lineFace.style.mask=Z?"url(#"+U.lineMaskId+")":"none",q=!0,ue&&Ae(U,U.lineMask)),Y.lineMask_enabled&&(We(U,X,"lineMask_outlineMode",Z=Y.lineMask_outlineMode)&&(Z?(U.lineMaskBG.style.display="none",U.lineMaskShape.style.display="inline"):(U.lineMaskBG.style.display="inline",U.lineMaskShape.style.display="none"),q=!0),["x","y"].forEach(function(e){var t="lineMask_"+e;We(U,X,t,Z=Y[t])&&(U.lineMask[e].baseVal.value=Z,q=!0)}),We(U,X,"caps_enabled",Z=Y.caps_enabled)&&(U.lineMaskCaps.style.display=U.lineOutlineMaskCaps.style.display=Z?"inline":"none",q=!0,ue&&Ae(U,U.capsMaskLine)),Y.caps_enabled&&([0,1].forEach(function(e){var t;We(U,X.capsMaskAnchor_enabledSE,e,Z=Y.capsMaskAnchor_enabledSE[e])&&(U.capsMaskAnchorSE[e].style.display=Z?"inline":"none",q=!0,ue&&Ae(U,U.lineMask)),Y.capsMaskAnchor_enabledSE[e]&&(Me(t=Y.capsMaskAnchor_pathDataSE[e],X.capsMaskAnchor_pathDataSE[e])&&(U.capsMaskAnchorSE[e].setPathData(t),X.capsMaskAnchor_pathDataSE[e]=t,q=!0),We(U,X.capsMaskAnchor_strokeWidthSE,e,Z=Y.capsMaskAnchor_strokeWidthSE[e])&&(U.capsMaskAnchorSE[e].style.strokeWidth=Z+"px",q=!0))}),We(U,X,"capsMaskMarker_enabled",Z=Y.capsMaskMarker_enabled)&&(U.capsMaskLine.style.display=Z?"inline":"none",q=!0),Y.capsMaskMarker_enabled&&[0,1].forEach(function(n){var e=Y.capsMaskMarker_plugSE[n],t=e!==ne?ae[ie[e]]:null,a=Ne(n,t);We(U,X.capsMaskMarker_enabledSE,n,Z=Y.capsMaskMarker_enabledSE[n])&&(U.capsMaskLine.style[a.prop]=Z?"url(#"+U.lineMaskMarkerIdSE[n]+")":"none",q=!0),Y.capsMaskMarker_enabledSE[n]&&(We(U,X.capsMaskMarker_plugSE,n,e)&&(U.capsMaskMarkerShapeSE[n].href.baseVal="#"+t.elmId,Pe(U,U.capsMaskMarkerSE[n],a.orient,t.bBox,U.svg,U.capsMaskMarkerShapeSE[n],U.capsMaskLine),q=!0,re&&(Ae(U,U.capsMaskLine),Ae(U,U.lineFace))),["markerWidth","markerHeight"].forEach(function(e){var 
t="capsMaskMarker_"+e+"SE";We(U,X[t],n,Z=Y[t][n])&&(U.capsMaskMarkerSE[n][e].baseVal.value=Z,q=!0)}))}))),Y.lineOutline_enabled&&["x","y"].forEach(function(e){var t="lineOutlineMask_"+e;We(U,X,t,Z=Y[t])&&(U.lineOutlineMask[e].baseVal.value=Z,q=!0)}),q),t.effect&&(J=(Q=e).curStats,$=Q.aplStats,Object.keys(te).forEach(function(e){var t=te[e],n=e+"_enabled",a=e+"_options",i=J[a];We(Q,$,n,K=J[n])?(K&&($[a]=de(i)),t[K?"init":"remove"](Q)):K&&ce(i,$[a])&&(t.remove(Q),$[n]=!0,$[a]=de(i),t.init(Q))})),(se||ue)&&ee.line&&!ee.path&&Ae(e,e.lineShape),se&&ee.plug&&!ee.line&&Ae(e,e.plugsFace),Ve(e)}function ze(e,t){return{duration:w(e.duration)&&0<e.duration?e.duration:t.duration,timing:c.validTiming(e.timing)?e.timing:de(t.timing)}}function je(e,t,n,a){var i,o=e.curStats,l=e.aplStats,r={};function s(){["show_on","show_effect","show_animOptions"].forEach(function(e){l[e]=o[e]})}o.show_on=t,n&&g[n]&&(o.show_effect=n,o.show_animOptions=ze(k(a)?a:{},g[n].defaultAnimOptions)),r.show_on=o.show_on!==l.show_on,r.show_effect=o.show_effect!==l.show_effect,r.show_animOptions=ce(o.show_animOptions,l.show_animOptions),r.show_effect||r.show_animOptions?o.show_inAnim?(i=r.show_effect?g[l.show_effect].stop(e,!0,!0):g[l.show_effect].stop(e),s(),g[l.show_effect].init(e,i)):r.show_on&&(l.show_effect&&r.show_effect&&g[l.show_effect].stop(e,!0,!0),s(),g[l.show_effect].init(e)):r.show_on&&(s(),g[l.show_effect].start(e))}function He(e,t,n){var a={props:e,optionName:n};return!(!(e.attachments.indexOf(t)<0)||t.conf.bind&&!t.conf.bind(t,a))&&(e.attachments.push(t),t.boundTargets.push(a),!0)}function Ue(n,a,e){var i=n.attachments.indexOf(a);-1<i&&n.attachments.splice(i,1),a.boundTargets.some(function(e,t){return e.props===n&&(a.conf.unbind&&a.conf.unbind(a,e),i=t,!0)})&&(a.boundTargets.splice(i,1),e||Le(function(){a.boundTargets.length||o(a)}))}function Ze(s,u){var e,i,h=s.options,p={};function f(e,t,n,a,i){var o={};return 
n?null!=a?(o.container=e[n],o.key=a):(o.container=e,o.key=n):(o.container=e,o.key=t),o.default=i,o.acceptsAuto=null==o.default,o}function c(e,t,n,a,i,o,l){var r,s,u,h=f(e,n,i,o,l);return null!=t[n]&&(s=(t[n]+"").toLowerCase())&&(h.acceptsAuto&&s===x||(u=a[s]))&&u!==h.container[h.key]&&(h.container[h.key]=u,r=!0),null!=h.container[h.key]||h.acceptsAuto||(h.container[h.key]=h.default,r=!0),r}function d(e,t,n,a,i,o,l,r,s){var u,h,p,c,d=f(e,n,i,o,l);if(!a){if(null==d.default)throw new Error("Invalid `type`: "+n);a=typeof d.default}return null!=t[n]&&(d.acceptsAuto&&(t[n]+"").toLowerCase()===x||(p=h=t[n],("number"===(c=a)?w(p):typeof p===c)&&(h=s&&"string"===a&&h?h.trim():h,1)&&(!r||r(h))))&&h!==d.container[d.key]&&(d.container[d.key]=h,u=!0),null!=d.container[d.key]||d.acceptsAuto||(d.container[d.key]=d.default,u=!0),u}if(u=u||{},["start","end"].forEach(function(e,t){var n=u[e],a=!1;if(n&&(ye(n)||(a=_(n,"anchor")))&&n!==h.anchorSE[t]){if(!1!==s.optionIsAttach.anchorSE[t]&&Ue(s,$[h.anchorSE[t]._id]),a&&!He(s,$[n._id],e))throw new Error("Can't bind attachment");h.anchorSE[t]=n,s.optionIsAttach.anchorSE[t]=a,i=p.position=!0}}),!h.anchorSE[0]||!h.anchorSE[1]||h.anchorSE[0]===h.anchorSE[1])throw new Error("`start` and `end` are required.");i&&(e=function(e,t){var n,a,i;if(!(n=me(e))||!(a=me(t)))throw new Error("Cannot get frames.");return n.length&&a.length&&(n.reverse(),a.reverse(),n.some(function(t){return a.some(function(e){return e===t&&(i=e.contentWindow,!0)})})),i||window}(!1!==s.optionIsAttach.anchorSE[0]?$[h.anchorSE[0]._id].element:h.anchorSE[0],!1!==s.optionIsAttach.anchorSE[1]?$[h.anchorSE[1]._id].element:h.anchorSE[1]))!==s.baseWindow&&(!function(a,e){var t,n,i,o,l,r,s,u,h,p,c=a.aplStats,d=e.document,f=v+"-"+a._id;function y(e){var t=n.appendChild(d.createElementNS(b,"mask"));return 
t.id=e,t.maskUnits.baseVal=SVGUnitTypes.SVG_UNIT_TYPE_USERSPACEONUSE,[t.x,t.y,t.width,t.height].forEach(function(e){e.baseVal.newValueSpecifiedUnits(SVGLength.SVG_LENGTHTYPE_PX,0)}),t}function S(e){var t=n.appendChild(d.createElementNS(b,"marker"));return t.id=e,t.markerUnits.baseVal=SVGMarkerElement.SVG_MARKERUNITS_STROKEWIDTH,t.viewBox.baseVal||t.setAttribute("viewBox","0 0 0 0"),t}function m(e){return[e.width,e.height].forEach(function(e){e.baseVal.newValueSpecifiedUnits(SVGLength.SVG_LENGTHTYPE_PERCENTAGE,100)}),e}a.pathList={},Te(c,Y),Object.keys(te).forEach(function(e){var t=e+"_enabled";c[t]&&(te[e].remove(a),c[t]=!1)}),a.baseWindow&&a.svg&&a.baseWindow.document.body.removeChild(a.svg),Re(a.baseWindow=e),a.bodyOffset=Be(e),a.svg=t=d.createElementNS(b,"svg"),t.className.baseVal=v,t.viewBox.baseVal||t.setAttribute("viewBox","0 0 0 0"),a.defs=n=t.appendChild(d.createElementNS(b,"defs")),a.linePath=o=n.appendChild(d.createElementNS(b,"path")),o.id=l=f+"-line-path",o.className.baseVal=v+"-line-path",ue&&(o.style.fill="none"),a.lineShape=o=n.appendChild(d.createElementNS(b,"use")),o.id=r=f+"-line-shape",o.href.baseVal="#"+l,(i=n.appendChild(d.createElementNS(b,"g"))).id=s=f+"-caps",a.capsMaskAnchorSE=[0,1].map(function(){var e=i.appendChild(d.createElementNS(b,"path"));return e.className.baseVal=v+"-caps-mask-anchor",e}),a.lineMaskMarkerIdSE=[f+"-caps-mask-marker-0",f+"-caps-mask-marker-1"],a.capsMaskMarkerSE=[0,1].map(function(e){return S(a.lineMaskMarkerIdSE[e])}),a.capsMaskMarkerShapeSE=[0,1].map(function(e){var t=a.capsMaskMarkerSE[e].appendChild(d.createElementNS(b,"use"));return 
t.className.baseVal=v+"-caps-mask-marker-shape",t}),a.capsMaskLine=o=i.appendChild(d.createElementNS(b,"use")),o.className.baseVal=v+"-caps-mask-line",o.href.baseVal="#"+r,a.maskBGRect=o=m(n.appendChild(d.createElementNS(b,"rect"))),o.id=u=f+"-mask-bg-rect",o.className.baseVal=v+"-mask-bg-rect",ue&&(o.style.fill="white"),a.lineMask=m(y(a.lineMaskId=f+"-line-mask")),a.lineMaskBG=o=a.lineMask.appendChild(d.createElementNS(b,"use")),o.href.baseVal="#"+u,a.lineMaskShape=o=a.lineMask.appendChild(d.createElementNS(b,"use")),o.className.baseVal=v+"-line-mask-shape",o.href.baseVal="#"+l,o.style.display="none",a.lineMaskCaps=o=a.lineMask.appendChild(d.createElementNS(b,"use")),o.href.baseVal="#"+s,a.lineOutlineMask=m(y(h=f+"-line-outline-mask")),(o=a.lineOutlineMask.appendChild(d.createElementNS(b,"use"))).href.baseVal="#"+u,a.lineOutlineMaskShape=o=a.lineOutlineMask.appendChild(d.createElementNS(b,"use")),o.className.baseVal=v+"-line-outline-mask-shape",o.href.baseVal="#"+l,a.lineOutlineMaskCaps=o=a.lineOutlineMask.appendChild(d.createElementNS(b,"use")),o.href.baseVal="#"+s,a.face=t.appendChild(d.createElementNS(b,"g")),a.lineFace=o=a.face.appendChild(d.createElementNS(b,"use")),o.href.baseVal="#"+r,a.lineOutlineFace=o=a.face.appendChild(d.createElementNS(b,"use")),o.href.baseVal="#"+r,o.style.mask="url(#"+h+")",o.style.display="none",a.plugMaskIdSE=[f+"-plug-mask-0",f+"-plug-mask-1"],a.plugMaskSE=[0,1].map(function(e){return y(a.plugMaskIdSE[e])}),a.plugMaskShapeSE=[0,1].map(function(e){var t=a.plugMaskSE[e].appendChild(d.createElementNS(b,"use"));return t.className.baseVal=v+"-plug-mask-shape",t}),p=[],a.plugOutlineMaskSE=[0,1].map(function(e){return y(p[e]=f+"-plug-outline-mask-"+e)}),a.plugOutlineMaskShapeSE=[0,1].map(function(e){var t=a.plugOutlineMaskSE[e].appendChild(d.createElementNS(b,"use"));return t.className.baseVal=v+"-plug-outline-mask-shape",t}),a.plugMarkerIdSE=[f+"-plug-marker-0",f+"-plug-marker-1"],a.plugMarkerSE=[0,1].map(function(e){var 
t=S(a.plugMarkerIdSE[e]);return ue&&(t.markerUnits.baseVal=SVGMarkerElement.SVG_MARKERUNITS_USERSPACEONUSE),t}),a.plugMarkerShapeSE=[0,1].map(function(e){return a.plugMarkerSE[e].appendChild(d.createElementNS(b,"g"))}),a.plugFaceSE=[0,1].map(function(e){return a.plugMarkerShapeSE[e].appendChild(d.createElementNS(b,"use"))}),a.plugOutlineFaceSE=[0,1].map(function(e){var t=a.plugMarkerShapeSE[e].appendChild(d.createElementNS(b,"use"));return t.style.mask="url(#"+p[e]+")",t.style.display="none",t}),a.plugsFace=o=a.face.appendChild(d.createElementNS(b,"use")),o.className.baseVal=v+"-plugs-face",o.href.baseVal="#"+r,o.style.display="none",a.curStats.show_inAnim?(a.isShown=1,g[c.show_effect].stop(a,!0)):a.isShown||(t.style.visibility="hidden"),d.body.appendChild(t),[0,1,2].forEach(function(e){var t,n=a.options.labelSEM[e];n&&_(n,"label")&&(t=$[n._id]).conf.initSvg&&t.conf.initSvg(t,a)})}(s,e),p.line=p.plug=p.lineOutline=p.plugOutline=p.faces=p.effect=!0),p.position=c(h,u,"path",m,null,null,pe.path)||p.position,p.position=c(h,u,"startSocket",n,"socketSE",0)||p.position,p.position=c(h,u,"endSocket",n,"socketSE",1)||p.position,[u.startSocketGravity,u.endSocketGravity].forEach(function(e,t){var n,a,i=!1;null!=e&&(Array.isArray(e)?w(e[0])&&w(e[1])&&(i=[e[0],e[1]],Array.isArray(h.socketGravitySE[t])&&(n=i,a=h.socketGravitySE[t],n.length===a.length&&n.every(function(e,t){return e===a[t]}))&&(i=!1)):((e+"").toLowerCase()===x?i=null:w(e)&&0<=e&&(i=e),i===h.socketGravitySE[t]&&(i=!1)),!1!==i&&(h.socketGravitySE[t]=i,p.position=!0))}),p.line=d(h,u,"color",null,"lineColor",null,pe.lineColor,null,!0)||p.line,p.line=d(h,u,"size",null,"lineSize",null,pe.lineSize,function(e){return 0<e})||p.line,["startPlug","endPlug"].forEach(function(e,t){p.plug=c(h,u,e,E,"plugSE",t,pe.plugSE[t])||p.plug,p.plug=d(h,u,e+"Color","string","plugColorSE",t,null,null,!0)||p.plug,p.plug=d(h,u,e+"Size",null,"plugSizeSE",t,pe.plugSizeSE[t],function(e){return 
0<e})||p.plug}),p.lineOutline=d(h,u,"outline",null,"lineOutlineEnabled",null,pe.lineOutlineEnabled)||p.lineOutline,p.lineOutline=d(h,u,"outlineColor",null,"lineOutlineColor",null,pe.lineOutlineColor,null,!0)||p.lineOutline,p.lineOutline=d(h,u,"outlineSize",null,"lineOutlineSize",null,pe.lineOutlineSize,function(e){return 0<e&&e<=.48})||p.lineOutline,["startPlugOutline","endPlugOutline"].forEach(function(e,t){p.plugOutline=d(h,u,e,null,"plugOutlineEnabledSE",t,pe.plugOutlineEnabledSE[t])||p.plugOutline,p.plugOutline=d(h,u,e+"Color","string","plugOutlineColorSE",t,null,null,!0)||p.plugOutline,p.plugOutline=d(h,u,e+"Size",null,"plugOutlineSizeSE",t,pe.plugOutlineSizeSE[t],function(e){return 1<=e})||p.plugOutline}),["startLabel","endLabel","middleLabel"].forEach(function(e,t){var n,a,i,o=u[e],l=h.labelSEM[t]&&!s.optionIsAttach.labelSEM[t]?$[h.labelSEM[t]._id].text:h.labelSEM[t],r=!1;if((n="string"==typeof o)&&(o=o.trim()),(n||o&&(r=_(o,"label")))&&o!==l){if(h.labelSEM[t]&&(Ue(s,$[h.labelSEM[t]._id]),h.labelSEM[t]=""),o){if(r?(a=$[(i=o)._id]).boundTargets.slice().forEach(function(e){a.conf.removeOption(a,e)}):i=new S(y.captionLabel,[o]),!He(s,$[i._id],e))throw new Error("Can't bind attachment");h.labelSEM[t]=i}s.optionIsAttach.labelSEM[t]=r}}),Object.keys(te).forEach(function(a){var e,t,o=te[a],n=a+"_enabled",i=a+"_options";function l(a){var i={};return o.optionsConf.forEach(function(e){var t=e[0],n=e[3];null==e[4]||i[n]||(i[n]=[]),("function"==typeof t?t:"id"===t?c:d).apply(null,[i,a].concat(e.slice(1)))}),i}function r(e){var t,n=a+"_animOptions";return 
e.hasOwnProperty("animation")?k(e.animation)?t=s.curStats[n]=ze(e.animation,o.defaultAnimOptions):(t=!!e.animation,s.curStats[n]=t?ze({},o.defaultAnimOptions):null):(t=!!o.defaultEnabled,s.curStats[n]=t?ze({},o.defaultAnimOptions):null),t}u.hasOwnProperty(a)&&(e=u[a],k(e)?(s.curStats[n]=!0,t=s.curStats[i]=l(e),o.anim&&(s.curStats[i].animation=r(e))):(t=s.curStats[n]=!!e)&&(s.curStats[i]=l({}),o.anim&&(s.curStats[i].animation=r({}))),ce(t,h[a])&&(h[a]=t,p.effect=!0))}),De(s,p)}function Ye(e,t,n){var a={options:{anchorSE:[],socketSE:[],socketGravitySE:[],plugSE:[],plugColorSE:[],plugSizeSE:[],plugOutlineEnabledSE:[],plugOutlineColorSE:[],plugOutlineSizeSE:[],labelSEM:["","",""]},optionIsAttach:{anchorSE:[!1,!1],labelSEM:[!1,!1,!1]},curStats:{},aplStats:{},attachments:[],events:{},reflowTargets:[]};Te(a.curStats,Y),Te(a.aplStats,Y),Object.keys(te).forEach(function(e){var t=te[e].stats;Te(a.curStats,t),Te(a.aplStats,t),a.options[e]=!1}),Te(a.curStats,X),Te(a.aplStats,X),a.curStats.show_effect=q,a.curStats.show_animOptions=de(g[q].defaultAnimOptions),Object.defineProperty(this,"_id",{value:++J}),a._id=this._id,K[this._id]=a,1===arguments.length&&(n=e,e=null),n=n||{},(e||t)&&(n=de(n),e&&(n.start=e),t&&(n.end=t)),a.isShown=a.aplStats.show_on=!n.hide,this.setOptions(n)}return te={dash:{stats:{dash_len:{},dash_gap:{},dash_maxOffset:{}},anim:!0,defaultAnimOptions:{duration:1e3,timing:"linear"},optionsConf:[["type","len","number",null,null,null,function(e){return 0<e}],["type","gap","number",null,null,null,function(e){return 0<e}]],init:function(e){Ie(e,"apl_line_strokeWidth",te.dash.update),e.lineFace.style.strokeDashoffset=0,te.dash.update(e)},remove:function(e){var t=e.curStats;Ce(e,"apl_line_strokeWidth",te.dash.update),t.dash_animId&&(c.remove(t.dash_animId),t.dash_animId=null),e.lineFace.style.strokeDasharray="none",e.lineFace.style.strokeDashoffset=0,Te(e.aplStats,te.dash.stats)},update:function(t){var 
e,n=t.curStats,a=t.aplStats,i=a.dash_options,o=!1;n.dash_len=i.len||2*a.line_strokeWidth,n.dash_gap=i.gap||a.line_strokeWidth,n.dash_maxOffset=n.dash_len+n.dash_gap,o=We(t,a,"dash_len",n.dash_len)||o,(o=We(t,a,"dash_gap",n.dash_gap)||o)&&(t.lineFace.style.strokeDasharray=a.dash_len+","+a.dash_gap),n.dash_animOptions?(o=We(t,a,"dash_maxOffset",n.dash_maxOffset),a.dash_animOptions&&(o||ce(n.dash_animOptions,a.dash_animOptions))&&(n.dash_animId&&(e=c.stop(n.dash_animId),c.remove(n.dash_animId)),a.dash_animOptions=null),a.dash_animOptions||(n.dash_animId=c.add(function(e){return(1-e)*a.dash_maxOffset+"px"},function(e){t.lineFace.style.strokeDashoffset=e},n.dash_animOptions.duration,0,n.dash_animOptions.timing,!1,e),a.dash_animOptions=de(n.dash_animOptions))):a.dash_animOptions&&(n.dash_animId&&(c.remove(n.dash_animId),n.dash_animId=null),t.lineFace.style.strokeDashoffset=0,a.dash_animOptions=null)}},gradient:{stats:{gradient_colorSE:{hasSE:!0},gradient_pointSE:{hasSE:!0,hasProps:!0}},optionsConf:[["type","startColor","string","colorSE",0,null,null,!0],["type","endColor","string","colorSE",1,null,null,!0]],init:function(e){var t,a=e.baseWindow.document,n=e.defs,i=v+"-"+e._id+"-gradient";e.efc_gradient_gradient=t=n.appendChild(a.createElementNS(b,"linearGradient")),t.id=i,t.gradientUnits.baseVal=SVGUnitTypes.SVG_UNIT_TYPE_USERSPACEONUSE,[t.x1,t.y1,t.x2,t.y2].forEach(function(e){e.baseVal.newValueSpecifiedUnits(SVGLength.SVG_LENGTHTYPE_PX,0)}),e.efc_gradient_stopSE=[0,1].map(function(t){var n=e.efc_gradient_gradient.appendChild(a.createElementNS(b,"stop"));try{n.offset.baseVal=t}catch(e){if(e.code!==DOMException.NO_MODIFICATION_ALLOWED_ERR)throw e;n.setAttribute("offset",t)}return 
n}),Ie(e,"cur_plug_colorSE",te.gradient.update),Ie(e,"apl_path",te.gradient.update),e.curStats.line_altColor=!0,e.lineFace.style.stroke="url(#"+i+")",te.gradient.update(e)},remove:function(e){e.efc_gradient_gradient&&(e.defs.removeChild(e.efc_gradient_gradient),e.efc_gradient_gradient=e.efc_gradient_stopSE=null),Ce(e,"cur_plug_colorSE",te.gradient.update),Ce(e,"apl_path",te.gradient.update),e.curStats.line_altColor=!1,e.lineFace.style.stroke=e.curStats.line_color,Te(e.aplStats,te.gradient.stats)},update:function(a){var e,t,i=a.curStats,o=a.aplStats,n=o.gradient_options,l=a.pathList.animVal||a.pathList.baseVal;[0,1].forEach(function(e){i.gradient_colorSE[e]=n.colorSE[e]||i.plug_colorSE[e]}),t=l[0][0],i.gradient_pointSE[0]={x:t.x,y:t.y},t=(e=l[l.length-1])[e.length-1],i.gradient_pointSE[1]={x:t.x,y:t.y},[0,1].forEach(function(t){var n;We(a,o.gradient_colorSE,t,n=i.gradient_colorSE[t])&&(ue?(n=fe(n),a.efc_gradient_stopSE[t].style.stopColor=n[1],a.efc_gradient_stopSE[t].style.stopOpacity=n[0]):a.efc_gradient_stopSE[t].style.stopColor=n),["x","y"].forEach(function(e){(n=i.gradient_pointSE[t][e])!==o.gradient_pointSE[t][e]&&(a.efc_gradient_gradient[e+(t+1)].baseVal.value=o.gradient_pointSE[t][e]=n)})})}},dropShadow:{stats:{dropShadow_dx:{},dropShadow_dy:{},dropShadow_blur:{},dropShadow_color:{},dropShadow_opacity:{},dropShadow_x:{},dropShadow_y:{}},optionsConf:[["type","dx",null,null,null,2],["type","dy",null,null,null,4],["type","blur",null,null,null,3,function(e){return 0<=e}],["type","color",null,null,null,"#000",null,!0],["type","opacity",null,null,null,.8,function(e){return 0<=e&&e<=1}]],init:function(t){var e,n,a,i,o,l=t.baseWindow.document,r=t.defs,s=v+"-"+t._id+"-dropShadow",u=(e=l,n=s,o={},"boolean"!=typeof 
p&&(p=!!window.SVGFEDropShadowElement&&!ue),o.elmsAppend=[o.elmFilter=a=e.createElementNS(b,"filter")],a.filterUnits.baseVal=SVGUnitTypes.SVG_UNIT_TYPE_USERSPACEONUSE,a.x.baseVal.newValueSpecifiedUnits(SVGLength.SVG_LENGTHTYPE_PX,0),a.y.baseVal.newValueSpecifiedUnits(SVGLength.SVG_LENGTHTYPE_PX,0),a.width.baseVal.newValueSpecifiedUnits(SVGLength.SVG_LENGTHTYPE_PERCENTAGE,100),a.height.baseVal.newValueSpecifiedUnits(SVGLength.SVG_LENGTHTYPE_PERCENTAGE,100),a.id=n,p?(o.elmOffset=o.elmBlur=i=a.appendChild(e.createElementNS(b,"feDropShadow")),o.styleFlood=i.style):(o.elmBlur=a.appendChild(e.createElementNS(b,"feGaussianBlur")),o.elmOffset=i=a.appendChild(e.createElementNS(b,"feOffset")),i.result.baseVal="offsetblur",i=a.appendChild(e.createElementNS(b,"feFlood")),o.styleFlood=i.style,(i=a.appendChild(e.createElementNS(b,"feComposite"))).in2.baseVal="offsetblur",i.operator.baseVal=SVGFECompositeElement.SVG_FECOMPOSITE_OPERATOR_IN,(i=a.appendChild(e.createElementNS(b,"feMerge"))).appendChild(e.createElementNS(b,"feMergeNode")),i.appendChild(e.createElementNS(b,"feMergeNode")).in1.baseVal="SourceGraphic"),o);["elmFilter","elmOffset","elmBlur","styleFlood","elmsAppend"].forEach(function(e){t["efc_dropShadow_"+e]=u[e]}),u.elmsAppend.forEach(function(e){r.appendChild(e)}),t.face.setAttribute("filter","url(#"+s+")"),Ie(t,"new_edge4viewBox",te.dropShadow.adjustEdge),te.dropShadow.update(t)},remove:function(e){var t=e.defs;e.efc_dropShadow_elmsAppend&&(e.efc_dropShadow_elmsAppend.forEach(function(e){t.removeChild(e)}),e.efc_dropShadow_elmFilter=e.efc_dropShadow_elmOffset=e.efc_dropShadow_elmBlur=e.efc_dropShadow_styleFlood=e.efc_dropShadow_elmsAppend=null),Ce(e,"new_edge4viewBox",te.dropShadow.adjustEdge),De(e,{}),e.face.removeAttribute("filter"),Te(e.aplStats,te.dropShadow.stats)},update:function(e){var 
t,n,a=e.curStats,i=e.aplStats,o=i.dropShadow_options;a.dropShadow_dx=t=o.dx,We(e,i,"dropShadow_dx",t)&&(e.efc_dropShadow_elmOffset.dx.baseVal=t,n=!0),a.dropShadow_dy=t=o.dy,We(e,i,"dropShadow_dy",t)&&(e.efc_dropShadow_elmOffset.dy.baseVal=t,n=!0),a.dropShadow_blur=t=o.blur,We(e,i,"dropShadow_blur",t)&&(e.efc_dropShadow_elmBlur.setStdDeviation(t,t),n=!0),n&&De(e,{}),a.dropShadow_color=t=o.color,We(e,i,"dropShadow_color",t)&&(e.efc_dropShadow_styleFlood.floodColor=t),a.dropShadow_opacity=t=o.opacity,We(e,i,"dropShadow_opacity",t)&&(e.efc_dropShadow_styleFlood.floodOpacity=t)},adjustEdge:function(a,i){var e,t,o=a.curStats,l=a.aplStats;null!=o.dropShadow_dx&&(e=3*o.dropShadow_blur,(t={x1:i.x1-e+o.dropShadow_dx,y1:i.y1-e+o.dropShadow_dy,x2:i.x2+e+o.dropShadow_dx,y2:i.y2+e+o.dropShadow_dy}).x1<i.x1&&(i.x1=t.x1),t.y1<i.y1&&(i.y1=t.y1),t.x2>i.x2&&(i.x2=t.x2),t.y2>i.y2&&(i.y2=t.y2),["x","y"].forEach(function(e){var t,n="dropShadow_"+e;o[n]=t=i[e+"1"],We(a,l,n,t)&&(a.efc_dropShadow_elmFilter[e].baseVal.value=t)}))}}},Object.keys(te).forEach(function(e){var t=te[e],n=t.stats;n[e+"_enabled"]={iniValue:!1},n[e+"_options"]={hasProps:!0},t.anim&&(n[e+"_animOptions"]={},n[e+"_animId"]={})}),g={none:{defaultAnimOptions:{},init:function(e,t){var n=e.curStats;n.show_animId&&(c.remove(n.show_animId),n.show_animId=null),g.none.start(e,t)},start:function(e,t){g.none.stop(e,!0)},stop:function(e,t,n){var a=e.curStats;return n=null!=n?n:e.aplStats.show_on,a.show_inAnim=!1,t&&Ge(e,n),n?1:0}},fade:{defaultAnimOptions:{duration:300,timing:"linear"},init:function(n,e){var t=n.curStats,a=n.aplStats;t.show_animId&&c.remove(t.show_animId),t.show_animId=c.add(function(e){return e},function(e,t){t?g.fade.stop(n,!0):(n.svg.style.opacity=e+"",le&&(Ae(n,n.svg),Ve(n)))},a.show_animOptions.duration,1,a.show_animOptions.timing,null,!1),g.fade.start(n,e)},start:function(e,t){var 
n,a=e.curStats;a.show_inAnim&&(n=c.stop(a.show_animId)),Ge(e,1),a.show_inAnim=!0,c.start(a.show_animId,!e.aplStats.show_on,null!=t?t:n)},stop:function(e,t,n){var a,i=e.curStats;return n=null!=n?n:e.aplStats.show_on,a=i.show_inAnim?c.stop(i.show_animId):n?1:0,i.show_inAnim=!1,t&&(e.svg.style.opacity=n?"":"0",Ge(e,n)),a}},draw:{defaultAnimOptions:{duration:500,timing:[.58,0,.42,1]},init:function(n,e){var t=n.curStats,a=n.aplStats,l=n.pathList.baseVal,i=Oe(l),r=i.segsLen,s=i.lenAll;t.show_animId&&c.remove(t.show_animId),t.show_animId=c.add(function(e){var t,n,a,i,o=-1;if(0===e)n=[[l[0][0],l[0][0]]];else if(1===e)n=l;else{for(t=s*e,n=[];t>=r[++o];)n.push(l[o]),t-=r[o];t&&(2===(a=l[o]).length?n.push([a[0],ve(a[0],a[1],t/r[o])]):(i=xe(a[0],a[1],a[2],a[3],ke(a[0],a[1],a[2],a[3],t)),n.push([a[0],i.fromP1,i.fromP2,i])))}return n},function(e,t){t?g.draw.stop(n,!0):(n.pathList.animVal=e,De(n,{path:!0}))},a.show_animOptions.duration,1,a.show_animOptions.timing,null,!1),g.draw.start(n,e)},start:function(e,t){var n,a=e.curStats;a.show_inAnim&&(n=c.stop(a.show_animId)),Ge(e,1),a.show_inAnim=!0,Ie(e,"apl_position",g.draw.update),c.start(a.show_animId,!e.aplStats.show_on,null!=t?t:n)},stop:function(e,t,n){var a,i=e.curStats;return n=null!=n?n:e.aplStats.show_on,a=i.show_inAnim?c.stop(i.show_animId):n?1:0,i.show_inAnim=!1,t&&(e.pathList.animVal=n?null:[[e.pathList.baseVal[0][0],e.pathList.baseVal[0][0]]],De(e,{path:!0}),Ge(e,n)),a},update:function(e){Ce(e,"apl_position",g.draw.update),e.curStats.show_inAnim?g.draw.init(e,g.draw.stop(e)):e.aplStats.show_animOptions={}}}},function(){function r(n){return function(e){var 
t={};t[n]=e,this.setOptions(t)}}[["start","anchorSE",0],["end","anchorSE",1],["color","lineColor"],["size","lineSize"],["startSocketGravity","socketGravitySE",0],["endSocketGravity","socketGravitySE",1],["startPlugColor","plugColorSE",0],["endPlugColor","plugColorSE",1],["startPlugSize","plugSizeSE",0],["endPlugSize","plugSizeSE",1],["outline","lineOutlineEnabled"],["outlineColor","lineOutlineColor"],["outlineSize","lineOutlineSize"],["startPlugOutline","plugOutlineEnabledSE",0],["endPlugOutline","plugOutlineEnabledSE",1],["startPlugOutlineColor","plugOutlineColorSE",0],["endPlugOutlineColor","plugOutlineColorSE",1],["startPlugOutlineSize","plugOutlineSizeSE",0],["endPlugOutlineSize","plugOutlineSizeSE",1]].forEach(function(e){var t=e[0],n=e[1],a=e[2];Object.defineProperty(Ye.prototype,t,{get:function(){var e=null!=a?K[this._id].options[n][a]:n?K[this._id].options[n]:K[this._id].options[t];return null==e?x:de(e)},set:r(t),enumerable:!0})}),[["path",m],["startSocket",n,"socketSE",0],["endSocket",n,"socketSE",1],["startPlug",E,"plugSE",0],["endPlug",E,"plugSE",1]].forEach(function(e){var a=e[0],i=e[1],o=e[2],l=e[3];Object.defineProperty(Ye.prototype,a,{get:function(){var t,n=null!=l?K[this._id].options[o][l]:o?K[this._id].options[o]:K[this._id].options[a];return n?Object.keys(i).some(function(e){return i[e]===n&&(t=e,!0)})?t:new Error("It's broken"):x},set:r(a),enumerable:!0})}),Object.keys(te).forEach(function(n){var a=te[n];Object.defineProperty(Ye.prototype,n,{get:function(){var u,e,t=K[this._id].options[n];return k(t)?(u=t,e=a.optionsConf.reduce(function(e,t){var n,a=t[0],i=t[1],o=t[2],l=t[3],r=t[4],s=null!=r?u[l][r]:l?u[l]:u[i];return e[i]="id"===a?s?Object.keys(o).some(function(e){return o[e]===s&&(n=e,!0)})?n:new Error("It's broken"):x:null==s?x:de(s),e},{}),a.anim&&(e.animation=de(u.animation)),e):t},set:r(n),enumerable:!0})}),["startLabel","endLabel","middleLabel"].forEach(function(e,n){Object.defineProperty(Ye.prototype,e,{get:function(){var 
e=K[this._id],t=e.options;return t.labelSEM[n]&&!e.optionIsAttach.labelSEM[n]?$[t.labelSEM[n]._id].text:t.labelSEM[n]||""},set:r(e),enumerable:!0})})}(),Ye.prototype.setOptions=function(e){return Ze(K[this._id],e),this},Ye.prototype.position=function(){return De(K[this._id],{position:!0}),this},Ye.prototype.remove=function(){var t=K[this._id],n=t.curStats;Object.keys(te).forEach(function(e){var t=e+"_animId";n[t]&&c.remove(n[t])}),n.show_animId&&c.remove(n.show_animId),t.attachments.slice().forEach(function(e){Ue(t,e)}),t.baseWindow&&t.svg&&t.baseWindow.document.body.removeChild(t.svg),delete K[this._id]},Ye.prototype.show=function(e,t){return je(K[this._id],!0,e,t),this},Ye.prototype.hide=function(e,t){return je(K[this._id],!1,e,t),this},o=function(t){t&&$[t._id]&&(t.boundTargets.slice().forEach(function(e){Ue(e.props,t,!0)}),t.conf.remove&&t.conf.remove(t),delete $[t._id])},S=function(){function e(e,t){var n,a={conf:e,curStats:{},aplStats:{},boundTargets:[]},i={};e.argOptions.every(function(e){return!(!t.length||("string"==typeof e.type?typeof t[0]!==e.type:"function"!=typeof e.type||!e.type(t[0])))&&(i[e.optionName]=t.shift(),!0)}),n=t.length&&k(t[0])?de(t[0]):{},Object.keys(i).forEach(function(e){n[e]=i[e]}),e.stats&&(Te(a.curStats,e.stats),Te(a.aplStats,e.stats)),Object.defineProperty(this,"_id",{value:++ee}),Object.defineProperty(this,"isRemoved",{get:function(){return!$[this._id]}}),a._id=this._id,e.init&&!e.init(a,n)||($[this._id]=a)}return e.prototype.remove=function(){var t=this,n=$[t._id];n&&(n.boundTargets.slice().forEach(function(e){n.conf.removeOption(n,e)}),Le(function(){var e=$[t._id];e&&(console.error("LeaderLineAttachment was not removed by removeOption"),o(e))}))},e}(),window.LeaderLineAttachment=S,_=function(e,t){return e instanceof S&&(!(e.isRemoved||t&&$[e._id].conf.type!==t)||null)},y={pointAnchor:{type:"anchor",argOptions:[{optionName:"element",type:ye}],init:function(e,t){return 
e.element=y.pointAnchor.checkElement(t.element),e.x=y.pointAnchor.parsePercent(t.x,!0)||[.5,!0],e.y=y.pointAnchor.parsePercent(t.y,!0)||[.5,!0],!0},removeOption:function(e,t){var n=t.props,a={},i=e.element,o=n.options.anchorSE["start"===t.optionName?1:0];i===o&&(i=o===document.body?new S(y.pointAnchor,[i]):document.body),a[t.optionName]=i,Ze(n,a)},getBBoxNest:function(e,t){var n=ge(e.element,t.baseWindow),a=n.width,i=n.height;return n.width=n.height=0,n.left=n.right=n.left+e.x[0]*(e.x[1]?a:1),n.top=n.bottom=n.top+e.y[0]*(e.y[1]?i:1),n},parsePercent:function(e,t){var n,a,i=!1;return w(e)?a=e:"string"==typeof e&&(n=u.exec(e))&&n[2]&&(i=0!==(a=parseFloat(n[1])/100)),null!=a&&(t||0<=a)?[a,i]:null},checkElement:function(e){if(null==e)e=document.body;else if(!ye(e))throw new Error("`element` must be Element");return e}},areaAnchor:{type:"anchor",argOptions:[{optionName:"element",type:ye},{optionName:"shape",type:"string"}],stats:{color:{},strokeWidth:{},elementWidth:{},elementHeight:{},elementLeft:{},elementTop:{},pathListRel:{},bBoxRel:{},pathData:{},viewBoxBBox:{hasProps:!0},dashLen:{},dashGap:{}},init:function(i,e){var t,n,a,o=[];return i.element=y.pointAnchor.checkElement(e.element),"string"==typeof e.color&&(i.color=e.color.trim()),"string"==typeof e.fillColor&&(i.fill=e.fillColor.trim()),w(e.size)&&0<=e.size&&(i.size=e.size),e.dash&&(i.dash=!0,w(e.dash.len)&&0<e.dash.len&&(i.dashLen=e.dash.len),w(e.dash.gap)&&0<e.dash.gap&&(i.dashGap=e.dash.gap)),"circle"===e.shape?i.shape=e.shape:"polygon"===e.shape&&Array.isArray(e.points)&&3<=e.points.length&&e.points.every(function(e){var 
t={};return!(!(t.x=y.pointAnchor.parsePercent(e[0],!0))||!(t.y=y.pointAnchor.parsePercent(e[1],!0)))&&(o.push(t),(t.x[1]||t.y[1])&&(i.hasRatio=!0),!0)})?(i.shape=e.shape,i.points=o):(i.shape="rect",i.radius=w(e.radius)&&0<=e.radius?e.radius:0),"rect"!==i.shape&&"circle"!==i.shape||(i.x=y.pointAnchor.parsePercent(e.x,!0)||[-.05,!0],i.y=y.pointAnchor.parsePercent(e.y,!0)||[-.05,!0],i.width=y.pointAnchor.parsePercent(e.width)||[1.1,!0],i.height=y.pointAnchor.parsePercent(e.height)||[1.1,!0],(i.x[1]||i.y[1]||i.width[1]||i.height[1])&&(i.hasRatio=!0)),t=i.element.ownerDocument,i.svg=n=t.createElementNS(b,"svg"),n.className.baseVal=v+"-areaAnchor",n.viewBox.baseVal||n.setAttribute("viewBox","0 0 0 0"),i.path=n.appendChild(t.createElementNS(b,"path")),i.path.style.fill=i.fill||"none",i.isShown=!1,n.style.visibility="hidden",t.body.appendChild(n),Re(a=t.defaultView),i.bodyOffset=Be(a),i.updateColor=function(){var e,t=i.curStats,n=i.aplStats,a=i.boundTargets.length?i.boundTargets[0].props.curStats:null;t.color=e=i.color||(a?a.line_color:pe.lineColor),We(i,n,"color",e)&&(i.path.style.stroke=e)},i.updateShow=function(){Ge(i,i.boundTargets.some(function(e){return!0===e.props.isShown}))},!0},bind:function(e,t){var n=t.props;return e.color||Ie(n,"cur_line_color",e.updateColor),Ie(n,"svgShow",e.updateShow),Le(function(){e.updateColor(),e.updateShow()}),!0},unbind:function(e,t){var n=t.props;e.color||Ce(n,"cur_line_color",e.updateColor),Ce(n,"svgShow",e.updateShow),1<e.boundTargets.length&&Le(function(){e.updateColor(),e.updateShow(),y.areaAnchor.update(e)&&e.boundTargets.forEach(function(e){De(e.props,{position:!0})})})},removeOption:function(e,t){y.pointAnchor.removeOption(e,t)},remove:function(t){t.boundTargets.length&&(console.error("LeaderLineAttachment was not unbound by remove"),t.boundTargets.forEach(function(e){y.areaAnchor.unbind(t,e)})),t.svg.parentNode.removeChild(t.svg)},getStrokeWidth:function(e,t){return 
y.areaAnchor.update(e)&&1<e.boundTargets.length&&Le(function(){e.boundTargets.forEach(function(e){e.props!==t&&De(e.props,{position:!0})})}),e.curStats.strokeWidth},getPathData:function(e,t){var n=ge(e.element,t.baseWindow);return we(e.curStats.pathListRel,function(e){e.x+=n.left,e.y+=n.top})},getBBoxNest:function(e,t){var n=ge(e.element,t.baseWindow),a=e.curStats.bBoxRel;return{left:a.left+n.left,top:a.top+n.top,right:a.right+n.left,bottom:a.bottom+n.top,width:a.width,height:a.height}},update:function(t){var a,n,i,o,e,l,r,s,u,h,p,c,d,f,y,S,m,g,_,v,E,x,b,k,w,O,M,I,C,L,A,V,P=t.curStats,N=t.aplStats,T=t.boundTargets.length?t.boundTargets[0].props.curStats:null,W={};if(W.strokeWidth=We(t,P,"strokeWidth",null!=t.size?t.size:T?T.line_strokeWidth:pe.lineSize),a=Se(t.element),W.elementWidth=We(t,P,"elementWidth",a.width),W.elementHeight=We(t,P,"elementHeight",a.height),W.elementLeft=We(t,P,"elementLeft",a.left),W.elementTop=We(t,P,"elementTop",a.top),W.strokeWidth||t.hasRatio&&(W.elementWidth||W.elementHeight)){switch(t.shape){case"rect":(v={left:t.x[0]*(t.x[1]?a.width:1),top:t.y[0]*(t.y[1]?a.height:1),width:t.width[0]*(t.width[1]?a.width:1),height:t.height[0]*(t.height[1]?a.height:1)}).right=v.left+v.width,v.bottom=v.top+v.height,k=P.strokeWidth/2,x=(b=Math.min(v.width,v.height))?b/2*Math.SQRT2+k:0,(E=t.radius?t.radius<=x?t.radius:x:0)?(O=E-(w=(E-k)/Math.SQRT2),I=E*U,M=[{x:v.left-O,y:v.top+w},{x:v.left+w,y:v.top-O},{x:v.right-w,y:v.top-O},{x:v.right+O,y:v.top+w},{x:v.right+O,y:v.bottom-w},{x:v.right-w,y:v.bottom+O},{x:v.left+w,y:v.bottom+O},{x:v.left-O,y:v.bottom-w}],P.pathListRel=[[M[0],{x:M[0].x,y:M[0].y-I},{x:M[1].x-I,y:M[1].y},M[1]]],M[1].x!==M[2].x&&P.pathListRel.push([M[1],M[2]]),P.pathListRel.push([M[2],{x:M[2].x+I,y:M[2].y},{x:M[3].x,y:M[3].y-I},M[3]]),M[3].y!==M[4].y&&P.pathListRel.push([M[3],M[4]]),P.pathListRel.push([M[4],{x:M[4].x,y:M[4].y+I},{x:M[5].x+I,y:M[5].y},M[5]]),M[5].x!==M[6].x&&P.pathListRel.push([M[5],M[6]]),P.pathListRel.push([M[6],{x:M[6].x-I,y:M[
6].y},{x:M[7].x,y:M[7].y+I},M[7]]),M[7].y!==M[0].y&&P.pathListRel.push([M[7],M[0]]),P.pathListRel.push([]),O=E-w+P.strokeWidth/2,M=[{x:v.left-O,y:v.top-O},{x:v.right+O,y:v.bottom+O}]):(O=P.strokeWidth/2,M=[{x:v.left-O,y:v.top-O},{x:v.right+O,y:v.bottom+O}],P.pathListRel=[[M[0],{x:M[1].x,y:M[0].y}],[{x:M[1].x,y:M[0].y},M[1]],[M[1],{x:M[0].x,y:M[1].y}],[]],M=[{x:v.left-P.strokeWidth,y:v.top-P.strokeWidth},{x:v.right+P.strokeWidth,y:v.bottom+P.strokeWidth}]),P.bBoxRel={left:M[0].x,top:M[0].y,right:M[1].x,bottom:M[1].y,width:M[1].x-M[0].x,height:M[1].y-M[0].y};break;case"circle":(r={left:t.x[0]*(t.x[1]?a.width:1),top:t.y[0]*(t.y[1]?a.height:1),width:t.width[0]*(t.width[1]?a.width:1),height:t.height[0]*(t.height[1]?a.height:1)}).width||r.height||(r.width=r.height=10),r.width||(r.width=r.height),r.height||(r.height=r.width),r.right=r.left+r.width,r.bottom=r.top+r.height,s=r.left+r.width/2,u=r.top+r.height/2,f=P.strokeWidth/2,y=r.width/2,S=r.height/2,h=y*Math.SQRT2+f,p=S*Math.SQRT2+f,c=h*U,d=p*U,_=[{x:s-h,y:u},{x:s,y:u-p},{x:s+h,y:u},{x:s,y:u+p}],P.pathListRel=[[_[0],{x:_[0].x,y:_[0].y-d},{x:_[1].x-c,y:_[1].y},_[1]],[_[1],{x:_[1].x+c,y:_[1].y},{x:_[2].x,y:_[2].y-d},_[2]],[_[2],{x:_[2].x,y:_[2].y+d},{x:_[3].x+c,y:_[3].y},_[3]],[_[3],{x:_[3].x-c,y:_[3].y},{x:_[0].x,y:_[0].y+d},_[0]],[]],m=h-y+P.strokeWidth/2,g=p-S+P.strokeWidth/2,_=[{x:r.left-m,y:r.top-g},{x:r.right+m,y:r.bottom+g}],P.bBoxRel={left:_[0].x,top:_[0].y,right:_[1].x,bottom:_[1].y,width:_[1].x-_[0].x,height:_[1].y-_[0].y};break;case"polygon":t.points.forEach(function(e){var 
t=e.x[0]*(e.x[1]?a.width:1),n=e.y[0]*(e.y[1]?a.height:1);i?(t<i.left&&(i.left=t),t>i.right&&(i.right=t),n<i.top&&(i.top=n),n>i.bottom&&(i.bottom=n)):i={left:t,right:t,top:n,bottom:n},o?P.pathListRel.push([o,{x:t,y:n}]):P.pathListRel=[],o={x:t,y:n}}),P.pathListRel.push([]),e=P.strokeWidth/2,l=[{x:i.left-e,y:i.top-e},{x:i.right+e,y:i.bottom+e}],P.bBoxRel={left:l[0].x,top:l[0].y,right:l[1].x,bottom:l[1].y,width:l[1].x-l[0].x,height:l[1].y-l[0].y}}W.pathListRel=W.bBoxRel=!0}return(W.pathListRel||W.elementLeft||W.elementTop)&&(P.pathData=we(P.pathListRel,function(e){e.x+=a.left,e.y+=a.top})),We(t,N,"strokeWidth",n=P.strokeWidth)&&(t.path.style.strokeWidth=n+"px"),Me(n=P.pathData,N.pathData)&&(t.path.setPathData(n),N.pathData=n,W.pathData=!0),t.dash&&(!W.pathData&&(!W.strokeWidth||t.dashLen&&t.dashGap)||(P.dashLen=t.dashLen||2*P.strokeWidth,P.dashGap=t.dashGap||P.strokeWidth),W.dash=We(t,N,"dashLen",P.dashLen)||W.dash,W.dash=We(t,N,"dashGap",P.dashGap)||W.dash,W.dash&&(t.path.style.strokeDasharray=N.dashLen+","+N.dashGap)),C=P.viewBoxBBox,L=N.viewBoxBBox,A=t.svg.viewBox.baseVal,V=t.svg.style,C.x=P.bBoxRel.left+a.left,C.y=P.bBoxRel.top+a.top,C.width=P.bBoxRel.width,C.height=P.bBoxRel.height,["x","y","width","height"].forEach(function(e){(n=C[e])!==L[e]&&(A[e]=L[e]=n,V[oe[e]]=n+("x"===e||"y"===e?t.bodyOffset[e]:0)+"px")}),W.strokeWidth||W.pathListRel||W.bBoxRel}},mouseHoverAnchor:{type:"anchor",argOptions:[{optionName:"element",type:ye},{optionName:"showEffectName",type:"string"}],style:{backgroundImage:"url('data:image/svg+xml;charset=utf-8;base64,PHN2ZyB2ZXJzaW9uPSIxLjEiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgd2lkdGg9IjI0IiBoZWlnaHQ9IjI0Ij48cG9seWdvbiBwb2ludHM9IjI0LDAgMCw4IDgsMTEgMCwxOSA1LDI0IDEzLDE2IDE2LDI0IiBmaWxsPSJjb3JhbCIvPjwvc3ZnPg==')",backgroundSize:"",backgroundRepeat:"no-repeat",backgroundColor:"#f8f881",cursor:"default"},hoverStyle:{backgroundImage:"none",backgroundColor:"#fadf8f"},padding:{top:1,right:15,bottom:1,left:2},minHeight:15,backgroundPosition
:{right:2,top:2},backgroundSize:{width:12,height:12},dirKeys:[["top","Top"],["right","Right"],["bottom","Bottom"],["left","Left"]],init:function(a,i){var o,t,e,n,l,r,s,u,h,p,c,d=y.mouseHoverAnchor,f={};if(a.element=y.pointAnchor.checkElement(i.element),u=a.element,!((p=u.ownerDocument)&&(h=p.defaultView)&&h.HTMLElement&&u instanceof h.HTMLElement))throw new Error("`element` must be HTML element");return d.style.backgroundSize=d.backgroundSize.width+"px "+d.backgroundSize.height+"px",["style","hoverStyle"].forEach(function(e){var n=d[e];a[e]=Object.keys(n).reduce(function(e,t){return e[t]=n[t],e},{})}),"inline"===(o=a.element.ownerDocument.defaultView.getComputedStyle(a.element,"")).display?a.style.display="inline-block":"none"===o.display&&(a.style.display="block"),y.mouseHoverAnchor.dirKeys.forEach(function(e){var t=e[0],n="padding"+e[1];parseFloat(o[n])<d.padding[t]&&(a.style[n]=d.padding[t]+"px")}),a.style.display&&(n=a.element.style.display,a.element.style.display=a.style.display),y.mouseHoverAnchor.dirKeys.forEach(function(e){var t="padding"+e[1];a.style[t]&&(f[t]=a.element.style[t],a.element.style[t]=a.style[t])}),(e=a.element.getBoundingClientRect()).height<d.minHeight&&(le?(c=d.minHeight,"content-box"===o.boxSizing?c-=parseFloat(o.borderTopWidth)+parseFloat(o.borderBottomWidth)+parseFloat(o.paddingTop)+parseFloat(o.paddingBottom):"padding-box"===o.boxSizing&&(c-=parseFloat(o.borderTopWidth)+parseFloat(o.borderBottomWidth)),a.style.height=c+"px"):a.style.height=parseFloat(o.height)+(d.minHeight-e.height)+"px"),a.style.backgroundPosition=ue?e.width-d.backgroundSize.width-d.backgroundPosition.right+"px "+d.backgroundPosition.top+"px":"right "+d.backgroundPosition.right+"px top "+d.backgroundPosition.top+"px",a.style.display&&(a.element.style.display=n),y.mouseHoverAnchor.dirKeys.forEach(function(e){var t="padding"+e[1];a.style[t]&&(a.element.style[t]=f[t])}),["style","hoverStyle"].forEach(function(e){var 
t=a[e],n=i[e];k(n)&&Object.keys(n).forEach(function(e){"string"==typeof n[e]||w(n[e])?t[e]=n[e]:null==n[e]&&delete t[e]})}),"function"==typeof i.onSwitch&&(s=i.onSwitch),i.showEffectName&&g[i.showEffectName]&&(a.showEffectName=l=i.showEffectName),r=i.animOptions,a.elmStyle=t=a.element.style,a.mouseenter=function(e){a.hoverStyleSave=d.getStyles(t,Object.keys(a.hoverStyle)),d.setStyles(t,a.hoverStyle),a.boundTargets.forEach(function(e){je(e.props,!0,l,r)}),s&&s(e)},a.mouseleave=function(e){d.setStyles(t,a.hoverStyleSave),a.boundTargets.forEach(function(e){je(e.props,!1,l,r)}),s&&s(e)},!0},bind:function(e,t){var n,a,i,o,l;return t.props.svg?y.mouseHoverAnchor.llShow(t.props,!1,e.showEffectName):Le(function(){y.mouseHoverAnchor.llShow(t.props,!1,e.showEffectName)}),e.enabled||(e.styleSave=y.mouseHoverAnchor.getStyles(e.elmStyle,Object.keys(e.style)),y.mouseHoverAnchor.setStyles(e.elmStyle,e.style),e.removeEventListener=(n=e.element,a=e.mouseenter,i=e.mouseleave,"onmouseenter"in n&&"onmouseleave"in n?(n.addEventListener("mouseenter",a,!1),n.addEventListener("mouseleave",i,!1),function(){n.removeEventListener("mouseenter",a,!1),n.removeEventListener("mouseleave",i,!1)}):(console.warn("mouseenter and mouseleave events polyfill is 
enabled."),o=function(e){e.relatedTarget&&(e.relatedTarget===this||this.compareDocumentPosition(e.relatedTarget)&Node.DOCUMENT_POSITION_CONTAINED_BY)||a.apply(this,arguments)},n.addEventListener("mouseover",o),l=function(e){e.relatedTarget&&(e.relatedTarget===this||this.compareDocumentPosition(e.relatedTarget)&Node.DOCUMENT_POSITION_CONTAINED_BY)||i.apply(this,arguments)},n.addEventListener("mouseout",l),function(){n.removeEventListener("mouseover",o,!1),n.removeEventListener("mouseout",l,!1)})),e.enabled=!0),!0},unbind:function(e,t){e.enabled&&e.boundTargets.length<=1&&(e.removeEventListener(),y.mouseHoverAnchor.setStyles(e.elmStyle,e.styleSave),e.enabled=!1),y.mouseHoverAnchor.llShow(t.props,!0,e.showEffectName)},removeOption:function(e,t){y.pointAnchor.removeOption(e,t)},remove:function(t){t.boundTargets.length&&(console.error("LeaderLineAttachment was not unbound by remove"),t.boundTargets.forEach(function(e){y.mouseHoverAnchor.unbind(t,e)}))},getBBoxNest:function(e,t){return ge(e.element,t.baseWindow)},llShow:function(e,t,n){g[n||e.curStats.show_effect].stop(e,!0,t),e.aplStats.show_on=t},getStyles:function(n,e){return e.reduce(function(e,t){return e[t]=n[t],e},{})},setStyles:function(t,n){Object.keys(n).forEach(function(e){t[e]=n[e]})}},captionLabel:{type:"label",argOptions:[{optionName:"text",type:"string"}],stats:{color:{},x:{},y:{}},textStyleProps:["fontFamily","fontStyle","fontVariant","fontWeight","fontStretch","fontSize","fontSizeAdjust","kerning","letterSpacing","wordSpacing","textDecoration"],init:function(u,t){return"string"==typeof t.text&&(u.text=t.text.trim()),!!u.text&&("string"==typeof t.color&&(u.color=t.color.trim()),u.outlineColor="string"==typeof 
t.outlineColor?t.outlineColor.trim():"#fff",Array.isArray(t.offset)&&w(t.offset[0])&&w(t.offset[1])&&(u.offset={x:t.offset[0],y:t.offset[1]}),w(t.lineOffset)&&(u.lineOffset=t.lineOffset),y.captionLabel.textStyleProps.forEach(function(e){null!=t[e]&&(u[e]=t[e])}),u.updateColor=function(e){y.captionLabel.updateColor(u,e)},u.updateSocketXY=function(e){var t,n,a,i,o=u.curStats,l=u.aplStats,r=e.curStats,s=r.position_socketXYSE[u.socketIndex];null!=s.x&&(u.offset?(o.x=s.x+u.offset.x,o.y=s.y+u.offset.y):(t=u.height/2,n=Math.max(r.attach_plugSideLenSE[u.socketIndex]||0,r.line_strokeWidth/2),a=r.position_socketXYSE[u.socketIndex?0:1],s.socketId===L||s.socketId===I?(o.x=s.socketId===L?s.x-t-u.width:s.x+t,o.y=a.y<s.y?s.y+n+t:s.y-n-t-u.height):(o.x=a.x<s.x?s.x+n+t:s.x-n-t-u.width,o.y=s.socketId===M?s.y-t-u.height:s.y+t)),We(u,l,"x",i=o.x)&&(u.elmPosition.x.baseVal.getItem(0).value=i),We(u,l,"y",i=o.y)&&(u.elmPosition.y.baseVal.getItem(0).value=i+u.height))},u.updatePath=function(e){var t,n,a=u.curStats,i=u.aplStats,o=e.pathList.animVal||e.pathList.baseVal;o&&(t=y.captionLabel.getMidPoint(o,u.lineOffset),a.x=t.x-u.width/2,a.y=t.y-u.height/2,We(u,i,"x",n=a.x)&&(u.elmPosition.x.baseVal.getItem(0).value=n),We(u,i,"y",n=a.y)&&(u.elmPosition.y.baseVal.getItem(0).value=n+u.height))},u.updateShow=function(e){y.captionLabel.updateShow(u,e)},ue&&(u.adjustEdge=function(e,t){var n=u.curStats;null!=n.x&&y.captionLabel.adjustEdge(t,{x:n.x,y:n.y,width:u.width,height:u.height},u.strokeWidth/2)}),!0)},updateColor:function(e,t){var n,a=e.curStats,i=e.aplStats,o=t.curStats;a.color=n=e.color||o.line_color,We(e,i,"color",n)&&(e.styleFill.fill=n)},updateShow:function(e,t){var n=!0===t.isShown;n!==e.isShown&&(e.styleShow.visibility=n?"":"hidden",e.isShown=n)},adjustEdge:function(e,t,n){var a={x1:t.x-n,y1:t.y-n,x2:t.x+t.width+n,y2:t.y+t.height+n};a.x1<e.x1&&(e.x1=a.x1),a.y1<e.y1&&(e.y1=a.y1),a.x2>e.x2&&(e.x2=a.x2),a.y2>e.y2&&(e.y2=a.y2)},newText:function(e,t,n,a,i){var 
o,l,r,s,u,h;return(o=t.createElementNS(b,"text")).textContent=e,[o.x,o.y].forEach(function(e){var t=n.createSVGLength();t.newValueSpecifiedUnits(SVGLength.SVG_LENGTHTYPE_PX,0),e.baseVal.initialize(t)}),"boolean"!=typeof f&&(f="paintOrder"in o.style),i&&!f?(r=t.createElementNS(b,"defs"),o.id=a,r.appendChild(o),(u=(l=t.createElementNS(b,"g")).appendChild(t.createElementNS(b,"use"))).href.baseVal="#"+a,(s=l.appendChild(t.createElementNS(b,"use"))).href.baseVal="#"+a,(h=u.style).strokeLinejoin="round",{elmPosition:o,styleText:o.style,styleFill:s.style,styleStroke:h,styleShow:l.style,elmsAppend:[r,l]}):(h=o.style,i&&(h.strokeLinejoin="round",h.paintOrder="stroke"),{elmPosition:o,styleText:h,styleFill:h,styleStroke:i?h:null,styleShow:h,elmsAppend:[o]})},getMidPoint:function(e,t){var n,a,i,o=Oe(e),l=o.segsLen,r=o.lenAll,s=-1;if((n=r/2+(t||0))<=0)return 2===(a=e[0]).length?ve(a[0],a[1],0):xe(a[0],a[1],a[2],a[3],0);if(r<=n)return 2===(a=e[e.length-1]).length?ve(a[0],a[1],1):xe(a[0],a[1],a[2],a[3],1);for(i=[];n>l[++s];)i.push(e[s]),n-=l[s];return 2===(a=e[s]).length?ve(a[0],a[1],n/l[s]):xe(a[0],a[1],a[2],a[3],ke(a[0],a[1],a[2],a[3],n))},initSvg:function(t,n){var e,a,i=y.captionLabel.newText(t.text,n.baseWindow.document,n.svg,v+"-captionLabel-"+t._id,t.outlineColor);["elmPosition","styleFill","styleShow","elmsAppend"].forEach(function(e){t[e]=i[e]}),t.isShown=!1,t.styleShow.visibility="hidden",y.captionLabel.textStyleProps.forEach(function(e){null!=t[e]&&(i.styleText[e]=t[e])}),i.elmsAppend.forEach(function(e){n.svg.appendChild(e)}),e=i.elmPosition.getBBox(),t.width=e.width,t.height=e.height,t.outlineColor&&(a=10<(a=e.height/9)?10:a<2?2:a,i.styleStroke.strokeWidth=a+"px",i.styleStroke.stroke=t.outlineColor),t.strokeWidth=a||0,Te(t.aplStats,y.captionLabel.stats),t.updateColor(n),t.refSocketXY?t.updateSocketXY(n):t.updatePath(n),ue&&De(n,{}),t.updateShow(n)},bind:function(e,t){var n=t.props;return 
e.color||Ie(n,"cur_line_color",e.updateColor),(e.refSocketXY="startLabel"===t.optionName||"endLabel"===t.optionName)?(e.socketIndex="startLabel"===t.optionName?0:1,Ie(n,"apl_position",e.updateSocketXY),e.offset||(Ie(n,"cur_attach_plugSideLenSE",e.updateSocketXY),Ie(n,"cur_line_strokeWidth",e.updateSocketXY))):Ie(n,"apl_path",e.updatePath),Ie(n,"svgShow",e.updateShow),ue&&Ie(n,"new_edge4viewBox",e.adjustEdge),y.captionLabel.initSvg(e,n),!0},unbind:function(e,t){var n=t.props;e.elmsAppend&&(e.elmsAppend.forEach(function(e){n.svg.removeChild(e)}),e.elmPosition=e.styleFill=e.styleShow=e.elmsAppend=null),Te(e.curStats,y.captionLabel.stats),Te(e.aplStats,y.captionLabel.stats),e.color||Ce(n,"cur_line_color",e.updateColor),e.refSocketXY?(Ce(n,"apl_position",e.updateSocketXY),e.offset||(Ce(n,"cur_attach_plugSideLenSE",e.updateSocketXY),Ce(n,"cur_line_strokeWidth",e.updateSocketXY))):Ce(n,"apl_path",e.updatePath),Ce(n,"svgShow",e.updateShow),ue&&(Ce(n,"new_edge4viewBox",e.adjustEdge),De(n,{}))},removeOption:function(e,t){var n=t.props,a={};a[t.optionName]="",Ze(n,a)},remove:function(t){t.boundTargets.length&&(console.error("LeaderLineAttachment was not unbound by remove"),t.boundTargets.forEach(function(e){y.captionLabel.unbind(t,e)}))}},pathLabel:{type:"label",argOptions:[{optionName:"text",type:"string"}],stats:{color:{},startOffset:{},pathData:{}},init:function(s,t){return"string"==typeof t.text&&(s.text=t.text.trim()),!!s.text&&("string"==typeof t.color&&(s.color=t.color.trim()),s.outlineColor="string"==typeof t.outlineColor?t.outlineColor.trim():"#fff",w(t.lineOffset)&&(s.lineOffset=t.lineOffset),y.captionLabel.textStyleProps.forEach(function(e){null!=t[e]&&(s[e]=t[e])}),s.updateColor=function(e){y.captionLabel.updateColor(s,e)},s.updatePath=function(e){var 
t,n=s.curStats,a=s.aplStats,i=e.curStats,o=e.pathList.animVal||e.pathList.baseVal;o&&(n.pathData=t=y.pathLabel.getOffsetPathData(o,i.line_strokeWidth/2+s.strokeWidth/2+s.height/4,1.25*s.height),Me(t,a.pathData)&&(s.elmPath.setPathData(t),a.pathData=t,s.bBox=s.elmPosition.getBBox(),s.updateStartOffset(e)))},s.updateStartOffset=function(e){var t,n,a,i,o=s.curStats,l=s.aplStats,r=e.curStats;o.pathData&&((2!==s.semIndex||s.lineOffset)&&(t=o.pathData.reduce(function(e,t){var n,a=t.values;switch(t.type){case"M":i={x:a[0],y:a[1]};break;case"L":n={x:a[0],y:a[1]},i&&(e+=_e(i,n)),i=n;break;case"C":n={x:a[4],y:a[5]},i&&(e+=be(i,{x:a[0],y:a[1]},{x:a[2],y:a[3]},n)),i=n}return e},0),a=0===s.semIndex?0:1===s.semIndex?t:t/2,2!==s.semIndex&&(n=Math.max(r.attach_plugBackLenSE[s.semIndex]||0,r.line_strokeWidth/2)+s.strokeWidth/2+s.height/4,a=(a+=0===s.semIndex?n:-n)<0?0:t<a?t:a),s.lineOffset&&(a=(a+=s.lineOffset)<0?0:t<a?t:a),o.startOffset=a,We(s,l,"startOffset",a)&&(s.elmOffset.startOffset.baseVal.value=a)))},s.updateShow=function(e){y.captionLabel.updateShow(s,e)},ue&&(s.adjustEdge=function(e,t){s.bBox&&y.captionLabel.adjustEdge(t,s.bBox,s.strokeWidth/2)}),!0)},getOffsetPathData:function(e,x,n){var b,a,i=3,k=[];function w(e,t){return Math.abs(e.x-t.x)<i&&Math.abs(e.y-t.y)<i}return e.forEach(function(e){var 
t,n,a,i,o,l,r,s,u,h,p,c,d,f,y,S,m,g,_,v,E;2===e.length?(g=e[0],_=e[1],v=x,E=Math.atan2(g.y-_.y,_.x-g.x)+.5*Math.PI,t=[{x:g.x+Math.cos(E)*v,y:g.y+Math.sin(E)*v*-1},{x:_.x+Math.cos(E)*v,y:_.y+Math.sin(E)*v*-1}],b?(a=b.points,0<=(i=Math.atan2(a[1].y-a[0].y,a[0].x-a[1].x)-Math.atan2(e[0].y-e[1].y,e[1].x-e[0].x))&&i<=Math.PI?n={type:"line",points:t,inside:!0}:(l=Ee(a[0],a[1],x),o=Ee(t[1],t[0],x),s=a[0],h=o,p=t[1],c=(u=l).x-s.x,d=u.y-s.y,f=p.x-h.x,y=p.y-h.y,S=(-d*(s.x-h.x)+c*(s.y-h.y))/(-f*d+c*y),m=(f*(s.y-h.y)-y*(s.x-h.x))/(-f*d+c*y),(r=0<=S&&S<=1&&0<=m&&m<=1?{x:s.x+m*c,y:s.y+m*d}:null)?n={type:"line",points:[a[1]=r,t[1]]}:(a[1]=w(o,l)?o:l,n={type:"line",points:[o,t[1]]}),b.len=_e(a[0],a[1]))):n={type:"line",points:t},n.len=_e(n.points[0],n.points[1]),k.push(b=n)):(k.push({type:"cubic",points:function(e,t,n,a,i,o){for(var l,r,s=be(e,t,n,a)/o,u=1/(o<i?s*(i/o):s),h=[],p=0;r=(90-(l=xe(e,t,n,a,p)).angle)*(Math.PI/180),h.push({x:l.x+Math.cos(r)*i,y:l.y+Math.sin(r)*i*-1}),!(1<=p);)1<(p+=u)&&(p=1);return h}(e[0],e[1],e[2],e[3],x,16)}),b=null)}),b=null,k.forEach(function(e){var t;"line"===e.type?(e.inside&&(b.len>x?((t=b.points)[1]=Ee(t[0],t[1],-x),b.len=_e(t[0],t[1])):(b.points=null,b.len=0),e.len>x+n?((t=e.points)[0]=Ee(t[1],t[0],-(x+n)),e.len=_e(t[0],t[1])):(e.points=null,e.len=0)),b=e):b=null}),k.reduce(function(t,e){var n=e.points;return n&&(a&&w(n[0],a)||t.push({type:"M",values:[n[0].x,n[0].y]}),"line"===e.type?t.push({type:"L",values:[n[1].x,n[1].y]}):(n.shift(),n.forEach(function(e){t.push({type:"L",values:[e.x,e.y]})})),a=n[n.length-1]),t},[])},newText:function(e,t,n,a){var i,o,l,r,s,u,h,p,c,d;return(r=(l=t.createElementNS(b,"defs")).appendChild(t.createElementNS(b,"path"))).id=i=n+"-path",(u=(s=t.createElementNS(b,"text")).appendChild(t.createElementNS(b,"textPath"))).href.baseVal="#"+i,u.startOffset.baseVal.newValueSpecifiedUnits(SVGLength.SVG_LENGTHTYPE_PX,0),u.textContent=e,"boolean"!=typeof f&&(f="paintOrder"in 
s.style),a&&!f?(s.id=o=n+"-text",l.appendChild(s),(c=(h=t.createElementNS(b,"g")).appendChild(t.createElementNS(b,"use"))).href.baseVal="#"+o,(p=h.appendChild(t.createElementNS(b,"use"))).href.baseVal="#"+o,(d=c.style).strokeLinejoin="round",{elmPosition:s,elmPath:r,elmOffset:u,styleText:s.style,styleFill:p.style,styleStroke:d,styleShow:h.style,elmsAppend:[l,h]}):(d=s.style,a&&(d.strokeLinejoin="round",d.paintOrder="stroke"),{elmPosition:s,elmPath:r,elmOffset:u,styleText:d,styleFill:d,styleStroke:a?d:null,styleShow:d,elmsAppend:[l,s]})},initSvg:function(t,n){var e,a,i=y.pathLabel.newText(t.text,n.baseWindow.document,v+"-pathLabel-"+t._id,t.outlineColor);["elmPosition","elmPath","elmOffset","styleFill","styleShow","elmsAppend"].forEach(function(e){t[e]=i[e]}),t.isShown=!1,t.styleShow.visibility="hidden",y.captionLabel.textStyleProps.forEach(function(e){null!=t[e]&&(i.styleText[e]=t[e])}),i.elmsAppend.forEach(function(e){n.svg.appendChild(e)}),i.elmPath.setPathData([{type:"M",values:[0,100]},{type:"h",values:[100]}]),e=i.elmPosition.getBBox(),i.styleText.textAnchor=["start","end","middle"][t.semIndex],2!==t.semIndex||t.lineOffset||i.elmOffset.startOffset.baseVal.newValueSpecifiedUnits(SVGLength.SVG_LENGTHTYPE_PERCENTAGE,50),t.height=e.height,t.outlineColor&&(a=10<(a=e.height/9)?10:a<2?2:a,i.styleStroke.strokeWidth=a+"px",i.styleStroke.stroke=t.outlineColor),t.strokeWidth=a||0,Te(t.aplStats,y.pathLabel.stats),t.updateColor(n),t.updatePath(n),t.updateStartOffset(n),ue&&De(n,{}),t.updateShow(n)},bind:function(e,t){var n=t.props;return e.color||Ie(n,"cur_line_color",e.updateColor),Ie(n,"cur_line_strokeWidth",e.updatePath),Ie(n,"apl_path",e.updatePath),e.semIndex="startLabel"===t.optionName?0:"endLabel"===t.optionName?1:2,(2!==e.semIndex||e.lineOffset)&&Ie(n,"cur_attach_plugBackLenSE",e.updateStartOffset),Ie(n,"svgShow",e.updateShow),ue&&Ie(n,"new_edge4viewBox",e.adjustEdge),y.pathLabel.initSvg(e,n),!0},unbind:function(e,t){var 
n=t.props;e.elmsAppend&&(e.elmsAppend.forEach(function(e){n.svg.removeChild(e)}),e.elmPosition=e.elmPath=e.elmOffset=e.styleFill=e.styleShow=e.elmsAppend=null),Te(e.curStats,y.pathLabel.stats),Te(e.aplStats,y.pathLabel.stats),e.color||Ce(n,"cur_line_color",e.updateColor),Ce(n,"cur_line_strokeWidth",e.updatePath),Ce(n,"apl_path",e.updatePath),(2!==e.semIndex||e.lineOffset)&&Ce(n,"cur_attach_plugBackLenSE",e.updateStartOffset),Ce(n,"svgShow",e.updateShow),ue&&(Ce(n,"new_edge4viewBox",e.adjustEdge),De(n,{}))},removeOption:function(e,t){var n=t.props,a={};a[t.optionName]="",Ze(n,a)},remove:function(t){t.boundTargets.length&&(console.error("LeaderLineAttachment was not unbound by remove"),t.boundTargets.forEach(function(e){y.pathLabel.unbind(t,e)}))}}},Object.keys(y).forEach(function(e){Ye[e]=function(){return new S(y[e],Array.prototype.slice.call(arguments))}}),Ye.positionByWindowResize=!0,window.addEventListener("resize",O.add(function(){Ye.positionByWindowResize&&Object.keys(K).forEach(function(e){De(K[e],{position:!0})})}),!1),Ye}(); \ No newline at end of file
diff --git a/examples/program_analysis/ui/plain-draggable.min.js b/examples/program_analysis/ui/plain-draggable.min.js
new file mode 100644
index 0000000..8615cf0
--- /dev/null
+++ b/examples/program_analysis/ui/plain-draggable.min.js
@@ -0,0 +1,2 @@
+/*! PlainDraggable v2.5.12 (c) anseki https://anseki.github.io/plain-draggable/ */
+var PlainDraggable=function(t){var e={};function n(r){if(e[r])return e[r].exports;var o=e[r]={i:r,l:!1,exports:{}};return t[r].call(o.exports,o,o.exports,n),o.l=!0,o.exports}return n.m=t,n.c=e,n.d=function(t,e,r){n.o(t,e)||Object.defineProperty(t,e,{enumerable:!0,get:r})},n.r=function(t){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},n.t=function(t,e){if(1&e&&(t=n(t)),8&e)return t;if(4&e&&"object"==typeof t&&t&&t.__esModule)return t;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:t}),2&e&&"string"!=typeof t)for(var o in t)n.d(r,o,function(e){return t[e]}.bind(null,o));return r},n.n=function(t){var e=t&&t.__esModule?function(){return t.default}:function(){return t};return n.d(e,"a",e),e},n.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},n.p="",n(n.s=0)}([function(t,e,n){"use strict";n.r(e);var r=500,o=[],i=window.requestAnimationFrame||window.mozRequestAnimationFrame||window.webkitRequestAnimationFrame||window.msRequestAnimationFrame||function(t){return setTimeout(t,1e3/60)},a=window.cancelAnimationFrame||window.mozCancelAnimationFrame||window.webkitCancelAnimationFrame||window.msCancelAnimationFrame||function(t){return clearTimeout(t)},l=Date.now(),u=void 0;function s(){var t=void 0,e=void 0;u&&(a.call(window,u),u=null),o.forEach(function(e){var n;(n=e.event)&&(e.event=null,e.listener(n),t=!0)}),t?(l=Date.now(),e=!0):Date.now()-l<r&&(e=!0),e&&(u=i.call(window,s))}function d(t){var e=-1;return o.some(function(n,r){return n.listener===t&&(e=r,!0)}),e}var c={add:function(t){var e=void 0;return-1===d(t)?(o.push(e={listener:t}),function(t){e.event=t,u||s()}):null},remove:function(t){var e;(e=d(t))>-1&&(o.splice(e,1),!o.length&&u&&(a.call(window,u),u=null))}},f=function(){function t(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in 
r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}return function(e,n,r){return n&&t(e.prototype,n),r&&t(e,r),e}}();var p=!1;try{window.addEventListener("test",null,Object.defineProperty({},"passive",{get:function(){p=!0}}))}catch(t){}function v(t,e,n,r){t.addEventListener(e,n,p?r:r.capture)}function h(t,e){if(null!=t&&null!=e)for(var n=0;n<t.length;n++)if(t[n].identifier===e)return t[n];return null}function m(t){return t&&"number"==typeof t.clientX&&"number"==typeof t.clientY}function g(t){t.preventDefault()}var y=function(){function t(e){var n=this;!function(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}(this,t),this.startHandlers={},this.lastHandlerId=0,this.curPointerClass=null,this.curTouchId=null,this.lastPointerXY={clientX:0,clientY:0},this.lastTouchTime=0,this.options={preventDefault:!0,stopPropagation:!0},e&&["preventDefault","stopPropagation"].forEach(function(t){"boolean"==typeof e[t]&&(n.options[t]=e[t])})}return f(t,[{key:"regStartHandler",value:function(t){var e=this;return e.startHandlers[++e.lastHandlerId]=function(n){var r="mousedown"===n.type?"mouse":"touch",o=Date.now(),i=void 0,a=void 0;if("touch"===r)e.lastTouchTime=o,i=n.changedTouches[0],a=n.changedTouches[0].identifier;else{if(o-e.lastTouchTime<400)return;i=n}if(!m(i))throw new Error("No clientX/clientY");e.curPointerClass&&e.cancel(),t.call(e,i)&&(e.curPointerClass=r,e.curTouchId="touch"===r?a:null,e.lastPointerXY.clientX=i.clientX,e.lastPointerXY.clientY=i.clientY,e.options.preventDefault&&n.preventDefault(),e.options.stopPropagation&&n.stopPropagation())},e.lastHandlerId}},{key:"unregStartHandler",value:function(t){delete this.startHandlers[t]}},{key:"addStartHandler",value:function(t,e){if(!this.startHandlers[e])throw new Error("Invalid handlerId: "+e);return 
v(t,"mousedown",this.startHandlers[e],{capture:!1,passive:!1}),v(t,"touchstart",this.startHandlers[e],{capture:!1,passive:!1}),v(t,"dragstart",g,{capture:!1,passive:!1}),e}},{key:"removeStartHandler",value:function(t,e){if(!this.startHandlers[e])throw new Error("Invalid handlerId: "+e);return t.removeEventListener("mousedown",this.startHandlers[e],!1),t.removeEventListener("touchstart",this.startHandlers[e],!1),t.removeEventListener("dragstart",g,!1),e}},{key:"addMoveHandler",value:function(t,e){var n=this,r=c.add(function(t){var e="mousemove"===t.type?"mouse":"touch";if("touch"===e&&(n.lastTouchTime=Date.now()),e===n.curPointerClass){var r="touch"===e?h(t.changedTouches,n.curTouchId):t;m(r)&&(r.clientX===n.lastPointerXY.clientX&&r.clientY===n.lastPointerXY.clientY||n.move(r),n.options.preventDefault&&t.preventDefault(),n.options.stopPropagation&&t.stopPropagation())}});v(t,"mousemove",r,{capture:!1,passive:!1}),v(t,"touchmove",r,{capture:!1,passive:!1}),n.curMoveHandler=e}},{key:"move",value:function(t){m(t)&&(this.lastPointerXY.clientX=t.clientX,this.lastPointerXY.clientY=t.clientY),this.curMoveHandler&&this.curMoveHandler(this.lastPointerXY)}},{key:"addEndHandler",value:function(t,e){var n=this;function r(t){var e="mouseup"===t.type?"mouse":"touch";if("touch"===e&&(n.lastTouchTime=Date.now()),e===n.curPointerClass){var r="touch"===e?h(t.changedTouches,n.curTouchId)||(h(t.touches,n.curTouchId)?null:{}):t;r&&(n.end(r),n.options.preventDefault&&t.preventDefault(),n.options.stopPropagation&&t.stopPropagation())}}v(t,"mouseup",r,{capture:!1,passive:!1}),v(t,"touchend",r,{capture:!1,passive:!1}),n.curEndHandler=e}},{key:"end",value:function(t){m(t)&&(this.lastPointerXY.clientX=t.clientX,this.lastPointerXY.clientY=t.clientY),this.curEndHandler&&this.curEndHandler(this.lastPointerXY),this.curPointerClass=this.curTouchId=null}},{key:"addCancelHandler",value:function(t,e){var 
n=this;v(t,"touchcancel",function(t){n.lastTouchTime=Date.now(),null!=n.curPointerClass&&(h(t.changedTouches,n.curTouchId)||!h(t.touches,n.curTouchId))&&n.cancel()},{capture:!1,passive:!1}),n.curCancelHandler=e}},{key:"cancel",value:function(){this.curCancelHandler&&this.curCancelHandler(),this.curPointerClass=this.curTouchId=null}}],[{key:"addEventListenerWithOptions",get:function(){return v}}]),t}();function x(t){return t.substr(0,1).toUpperCase()+t.substr(1)}var w=["webkit","moz","ms","o"],b=w.reduce(function(t,e){return t.push(e),t.push(x(e)),t},[]),S=w.map(function(t){return"-"+t+"-"}),E=function(){var t=void 0;return function(){return t=t||document.createElement("div").style}}(),T=function(){var t=new RegExp("^(?:"+w.join("|")+")(.)","i"),e=/[A-Z]/;return function(n){return"float"===(n=(n+"").replace(/\s/g,"").replace(/-([\da-z])/gi,function(t,e){return e.toUpperCase()}).replace(t,function(t,n){return e.test(n)?n.toLowerCase():t})).toLowerCase()?"cssFloat":n}}(),B=function(){var t=new RegExp("^(?:"+S.join("|")+")","i");return function(e){return(null!=e?e+"":"").replace(/\s/g,"").replace(t,"")}}(),C=function(t,e){var n=E();return t=t.replace(/[A-Z]/g,function(t){return"-"+t.toLowerCase()}),n.setProperty(t,e),null!=n[t]&&n.getPropertyValue(t)===e},O={},H={};function k(t){if((t=T(t))&&null==O[t]){var e=E();if(null!=e[t])O[t]=t;else{var n=x(t);b.some(function(r){var o=r+n;return null!=e[o]&&(O[t]=o,!0)})||(O[t]=!1)}}return O[t]||void 0}var P={getName:k,getValue:function(t,e){var n=void 0;return(t=k(t))?(H[t]=H[t]||{},(Array.isArray(e)?e:[e]).some(function(e){return e=B(e),null!=H[t][e]?!1!==H[t][e]&&(n=H[t][e],!0):C(t,e)?(n=H[t][e]=e,!0):!!S.some(function(r){var o=r+e;return!!C(t,o)&&(n=H[t][e]=o,!0)})||(H[t][e]=!1,!1)}),"string"==typeof n?n:void 0):n}};function I(t){return(t+"").trim()}function _(t,e){e.setAttribute("class",t.join(" "))}function D(t){return!D.ignoreNative&&t.classList||function(){var 
e=(t.getAttribute("class")||"").trim().split(/\s+/).filter(function(t){return!!t}),n={length:e.length,item:function(t){return e[t]},contains:function(t){return-1!==e.indexOf(I(t))},add:function(){return function(t,e,n){n.filter(function(e){return!(!(e=I(e))||-1!==t.indexOf(e)||(t.push(e),0))}).length&&_(t,e)}(e,t,Array.prototype.slice.call(arguments)),D.methodChain?n:void 0},remove:function(){return function(t,e,n){n.filter(function(e){var n=void 0;return!(!(e=I(e))||-1===(n=t.indexOf(e))||(t.splice(n,1),0))}).length&&_(t,e)}(e,t,Array.prototype.slice.call(arguments)),D.methodChain?n:void 0},toggle:function(n,r){return function(t,e,n,r){var o=t.indexOf(n=I(n));return-1!==o?!!r||(t.splice(o,1),_(t,e),!1):!1!==r&&(t.push(n),_(t,e),!0)}(e,t,n,r)},replace:function(r,o){return function(t,e,n,r){var o=void 0;(n=I(n))&&(r=I(r))&&n!==r&&-1!==(o=t.indexOf(n))&&(t.splice(o,1),-1===t.indexOf(r)&&t.push(r),_(t,e))}(e,t,r,o),D.methodChain?n:void 0}};return n}()}D.methodChain=!0;var X=D,Y=function(){function t(t,e){for(var n=0;n<e.length;n++){var r=e[n];r.enumerable=r.enumerable||!1,r.configurable=!0,"value"in r&&(r.writable=!0),Object.defineProperty(t,r.key,r)}}return function(e,n,r){return n&&t(e.prototype,n),r&&t(e,r),e}}(),L="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"==typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t};X.ignoreNative=!0;var A=9e3,F=20,W="tl",j="both",R="both",M="containment",z=["tl","tr","bl","br"],N=["start","end"],V=["inside","outside"],G=[40,200,1e3],q=[100,40,0],U="-ms-scroll-limit"in document.documentElement.style&&"-ms-ime-align"in document.documentElement.style&&!window.navigator.msPointerEnabled,Z=!U&&!!document.uniqueID,$="MozAppearance"in document.documentElement.style,J=!(U||$||!window.chrome||!window.CSS),K=!U&&!Z&&!$&&!J&&!window.chrome&&"WebkitAppearance"in document.documentElement.style,Q=function(){var 
t={}.toString,e={}.hasOwnProperty.toString,n=e.call(Object);return function(r){var o=void 0,i=void 0;return r&&"[object Object]"===t.call(r)&&(!(o=Object.getPrototypeOf(r))||(i=o.hasOwnProperty("constructor")&&o.constructor)&&"function"==typeof i&&e.call(i)===n)}}(),tt=Number.isFinite||function(t){return"number"==typeof t&&window.isFinite(t)},et={},nt={},rt=new y,ot=0,it=void 0,at=void 0,lt=void 0,ut=void 0,st=void 0,dt=void 0,ct=void 0,ft=void 0,pt=void 0,vt=void 0,ht=K?["all-scroll","move"]:["grab","all-scroll","move"],mt=K?"move":["grabbing","move"],gt="plain-draggable",yt="plain-draggable-dragging",xt="plain-draggable-moving",wt={},bt=window.requestAnimationFrame||window.mozRequestAnimationFrame||window.webkitRequestAnimationFrame||window.msRequestAnimationFrame||function(t){return setTimeout(t,1e3/60)},St=window.cancelAnimationFrame||window.mozCancelAnimationFrame||window.webkitCancelAnimationFrame||window.msCancelAnimationFrame||function(t){return clearTimeout(t)},Et=function(){var t=Date.now();["x","y"].forEach(function(e){var n=Bt[e];if(n){var r=t-n.lastFrameTime,o=Ot(Ct,e),i=null!=n.lastValue&&Math.abs(n.lastValue-o)<10?n.lastValue:o;if(-1===n.dir?i>n.min:i<n.max){var a=i+n.speed*r*n.dir;a<n.min?a=n.min:a>n.max&&(a=n.max),Ot(Ct,e,a),n.lastValue=a}n.lastFrameTime=t}})},Tt=function t(){St.call(window,Ht),Et(),Ht=bt.call(window,t)},Bt={},Ct=void 0,Ot=void 0,Ht=void 0;function kt(t,e,n){return null!=n&&("x"===e?t.scrollTo(n,t.pageYOffset):t.scrollTo(t.pageXOffset,n)),"x"===e?t.pageXOffset:t.pageYOffset}function Pt(t,e,n){var r="x"===e?"scrollLeft":"scrollTop";return null!=n&&(t[r]=n),t[r]}function It(t){return t?Q(t)?Object.keys(t).reduce(function(e,n){return e[n]=It(t[n]),e},{}):Array.isArray(t)?t.map(It):t:t}function _t(t,e){var n=void 0,r=void 0;return(void 0===t?"undefined":L(t))!==(void 
0===e?"undefined":L(e))||(n=Q(t)?"obj":Array.isArray(t)?"array":"")!=(Q(e)?"obj":Array.isArray(e)?"array":"")||("obj"===n?_t(r=Object.keys(t).sort(),Object.keys(e).sort())||r.some(function(n){return _t(t[n],e[n])}):"array"===n?t.length!==e.length||t.some(function(t,n){return _t(t,e[n])}):t!==e)}function Dt(t){return!(!t||t.nodeType!==Node.ELEMENT_NODE||"function"!=typeof t.getBoundingClientRect||t.compareDocumentPosition(document)&Node.DOCUMENT_POSITION_DISCONNECTED)}function Xt(t){if(!Q(t))return null;var e=void 0;if(!tt(e=t.left)&&!tt(e=t.x))return null;if(t.left=t.x=e,!tt(e=t.top)&&!tt(e=t.y))return null;if(t.top=t.y=e,tt(t.width)&&t.width>=0)t.right=t.left+t.width;else{if(!(tt(t.right)&&t.right>=t.left))return null;t.width=t.right-t.left}if(tt(t.height)&&t.height>=0)t.bottom=t.top+t.height;else{if(!(tt(t.bottom)&&t.bottom>=t.top))return null;t.height=t.bottom-t.top}return t}function Yt(t){return tt(t)?{value:t,isRatio:!1}:"string"==typeof t?function(t){var e=/^(.+?)(%)?$/.exec(t),n=void 0,r=void 0;return e&&tt(n=parseFloat(e[1]))?{value:(r=!(!e[2]||!n))?n/100:n,isRatio:r}:null}(t.replace(/\s/g,"")):null}function Lt(t){return t.isRatio?100*t.value+"%":t.value}function At(t,e,n){return"number"==typeof t?t:e+t.value*(t.isRatio?n:1)}function Ft(t){if(!Q(t))return null;var e=void 0;if(!(e=Yt(t.left))&&!(e=Yt(t.x)))return null;if(t.left=t.x=e,!(e=Yt(t.top))&&!(e=Yt(t.y)))return null;if(t.top=t.y=e,(e=Yt(t.width))&&e.value>=0)t.width=e,delete t.right;else{if(!(e=Yt(t.right)))return null;t.right=e,delete t.width}if((e=Yt(t.height))&&e.value>=0)t.height=e,delete t.bottom;else{if(!(e=Yt(t.bottom)))return null;t.bottom=e,delete t.height}return t}function Wt(t){return Object.keys(t).reduce(function(e,n){return e[n]=Lt(t[n]),e},{})}function jt(t,e){var n={left:"x",right:"x",x:"x",width:"x",top:"y",bottom:"y",y:"y",height:"y"},r={x:e.left,y:e.top},o={x:e.width,y:e.height};return Xt(Object.keys(t).reduce(function(e,i){return 
e[i]=At(t[i],"width"===i||"height"===i?0:r[n[i]],o[n[i]]),e},{}))}function Rt(t,e){var n=t.getBoundingClientRect(),r={left:n.left,top:n.top,width:n.width,height:n.height};if(r.left+=window.pageXOffset,r.top+=window.pageYOffset,e){var o=window.getComputedStyle(t,""),i=parseFloat(o.borderTopWidth)||0,a=parseFloat(o.borderRightWidth)||0,l=parseFloat(o.borderBottomWidth)||0,u=parseFloat(o.borderLeftWidth)||0;r.left+=u,r.top+=i,r.width-=u+a,r.height-=i+l}return Xt(r)}function Mt(t,e){null==ut&&(!1!==ht&&(ut=P.getValue("cursor",ht)),null==ut&&(ut=!1)),t.style.cursor=!1===ut?e:ut}function zt(t){null==st&&(!1!==mt&&(st=P.getValue("cursor",mt)),null==st&&(st=!1)),!1!==st&&(t.style.cursor=st)}function Nt(t,e,n){var r=t.svgPoint;return r.x=e,r.y=n,r.matrixTransform(t.svgCtmElement.getScreenCTM().inverse())}function Vt(t,e){var n=t.elementBBox;if(e.left!==n.left||e.top!==n.top){var r=t.htmlOffset;return t.elementStyle[ft]="translate("+(e.left+r.left)+"px, "+(e.top+r.top)+"px)",!0}return!1}function Gt(t,e){var n=t.elementBBox,r=t.elementStyle,o=t.htmlOffset,i=!1;return e.left!==n.left&&(r.left=e.left+o.left+"px",i=!0),e.top!==n.top&&(r.top=e.top+o.top+"px",i=!0),i}function qt(t,e){var n=t.elementBBox;if(e.left!==n.left||e.top!==n.top){var r=t.svgOffset,o=t.svgOriginBBox,i=Nt(t,e.left-window.pageXOffset,e.top-window.pageYOffset);return t.svgTransform.setTranslate(i.x+r.x-o.x,i.y+r.y-o.y),!0}return!1}function Ut(t,e,n){var r=t.elementBBox;function o(){t.minLeft>=t.maxLeft?e.left=r.left:e.left<t.minLeft?e.left=t.minLeft:e.left>t.maxLeft&&(e.left=t.maxLeft),t.minTop>=t.maxTop?e.top=r.top:e.top<t.minTop?e.top=t.minTop:e.top>t.maxTop&&(e.top=t.maxTop)}if(o(),n){if(!1===n(e))return!1;o()}var i=t.moveElm(t,e);return i&&(t.elementBBox=Xt({left:e.left,top:e.top,width:r.width,height:r.height})),i}function Zt(t){var e=t.element,n=t.elementStyle,r=Rt(e),o=["display","marginTop","marginBottom","width","height"];o.unshift(ft);var i=n[ct];n[ct]="none";var 
a=Rt(e);t.orgStyle?o.forEach(function(e){null!=t.lastStyle[e]&&n[e]!==t.lastStyle[e]||(n[e]=t.orgStyle[e])}):(t.orgStyle=o.reduce(function(t,e){return t[e]=n[e]||"",t},{}),t.lastStyle={});var l=Rt(e),u=window.getComputedStyle(e,"");"inline"===u.display&&(n.display="inline-block",["Top","Bottom"].forEach(function(t){var e=parseFloat(u["padding"+t]);n["margin"+t]=e?"-"+e+"px":"0"})),n[ft]="translate(0, 0)";var s=Rt(e),d=t.htmlOffset={left:s.left?-s.left:0,top:s.top?-s.top:0};return n[ft]="translate("+(r.left+d.left)+"px, "+(r.top+d.top)+"px)",["width","height"].forEach(function(r){s[r]!==l[r]&&(n[r]=l[r]+"px",(s=Rt(e))[r]!==l[r]&&(n[r]=l[r]-(s[r]-l[r])+"px")),t.lastStyle[r]=n[r]}),e.offsetWidth,n[ct]=i,a.left===r.left&&a.top===r.top||(n[ft]="translate("+(a.left+d.left)+"px, "+(a.top+d.top)+"px)"),a}function $t(t){var e=t.element,n=t.elementStyle,r=Rt(e),o=["position","marginTop","marginRight","marginBottom","marginLeft","width","height"],i=n[ct];n[ct]="none";var a=Rt(e);t.orgStyle?o.forEach(function(e){null!=t.lastStyle[e]&&n[e]!==t.lastStyle[e]||(n[e]=t.orgStyle[e])}):(t.orgStyle=o.reduce(function(t,e){return t[e]=n[e]||"",t},{}),t.lastStyle={});var l=Rt(e);n.position="absolute",n.left=n.top=n.margin="0";var u=Rt(e),s=t.htmlOffset={left:u.left?-u.left:0,top:u.top?-u.top:0};return n.left=r.left+s.left+"px",n.top=r.top+s.top+"px",["width","height"].forEach(function(r){u[r]!==l[r]&&(n[r]=l[r]+"px",(u=Rt(e))[r]!==l[r]&&(n[r]=l[r]-(u[r]-l[r])+"px")),t.lastStyle[r]=n[r]}),e.offsetWidth,n[ct]=i,a.left===r.left&&a.top===r.top||(n.left=a.left+s.left+"px",n.top=a.top+s.top+"px"),a}function Jt(t){var e=t.element,n=t.svgTransform,r=e.getBoundingClientRect(),o=Rt(e);n.setTranslate(0,0);var i=t.svgOriginBBox=e.getBBox(),a=e.getBoundingClientRect(),l=Nt(t,a.left,a.top),u=t.svgOffset={x:i.x-l.x,y:i.y-l.y},s=Nt(t,r.left,r.top);return n.setTranslate(s.x+u.x-i.x,s.y+u.y-i.y),o}function Kt(t,e){var 
n=Rt(document.documentElement),r=t.elementBBox=t.initElm(t),o=t.containmentBBox=t.containmentIsBBox?jt(t.options.containment,n)||n:Rt(t.options.containment,!0);if(t.minLeft=o.left,t.maxLeft=o.right-r.width,t.minTop=o.top,t.maxTop=o.bottom-r.height,Ut(t,{left:r.left,top:r.top}),t.parsedSnapTargets){var i={x:r.width,y:r.height},a={x:t.minLeft,y:t.minTop},l={x:t.maxLeft,y:t.maxTop},u={left:"x",right:"x",x:"x",width:"x",xStart:"x",xEnd:"x",xStep:"x",top:"y",bottom:"y",y:"y",height:"y",yStart:"y",yEnd:"y",yStep:"y"},s=t.parsedSnapTargets.reduce(function(t,e){var s="containment"===e.base?o:n,d={x:s.left,y:s.top},c={x:s.width,y:s.height};function f(n){if(null==n.center&&(n.center=e.center),null==n.xGravity&&(n.xGravity=e.gravity),null==n.yGravity&&(n.yGravity=e.gravity),null!=n.x&&null!=n.y)n.x=At(n.x,d.x,c.x),n.y=At(n.y,d.y,c.y),n.center&&(n.x-=i.x/2,n.y-=i.y/2,n.corners=["tl"]),(n.corners||e.corners).forEach(function(e){var r=n.x-("tr"===e||"br"===e?i.x:0),o=n.y-("bl"===e||"br"===e?i.y:0);if(r>=a.x&&r<=l.x&&o>=a.y&&o<=l.y){var u={x:r,y:o},s=r-n.xGravity,d=r+n.xGravity,c=o-n.yGravity,f=o+n.yGravity;s>a.x&&(u.gravityXStart=s),d<l.x&&(u.gravityXEnd=d),c>a.y&&(u.gravityYStart=c),f<l.y&&(u.gravityYEnd=f),t.push(u)}});else{var r=null!=n.x?"x":"y",o="x"===r?"y":"x",u=o+"Start",s=o+"End",f=r+"Gravity",p=r.toUpperCase(),v=o.toUpperCase(),h="gravity"+p+"Start",m="gravity"+p+"End",g="gravity"+v+"Start",y="gravity"+v+"End";if(n[r]=At(n[r],d[r],c[r]),n[u]=At(n[u],d[o],c[o]),n[s]=At(n[s],d[o],c[o])-i[o],n[u]>n[s]||n[u]>l[o]||n[s]<a[o])return;n.center&&(n[r]-=i[r]/2,n.sides=["start"]),(n.sides||e.sides).forEach(function(e){var d=n[r]-("end"===e?i[r]:0);if(d>=a[r]&&d<=l[r]){var c={},p=d-n[f],v=d+n[f];c[r]=d,p>a[r]&&(c[h]=p),v<l[r]&&(c[m]=v),n[u]>a[o]&&(c[g]=n[u]),n[s]<l[o]&&(c[y]=n[s]),t.push(c)}})}}var p=void 0;if((p=e.element?Rt(e.element):null)||e.ppBBox)e.ppBBox&&(p=jt(e.ppBBox,s)),p&&e.edges.forEach(function(t){var 
n=e.gravity,o=e.gravity;"outside"===t&&(n+=r.width,o+=r.height);var i=p.left-n,a=p.right+n,l=p.top-o,u=p.bottom+o,s="inside"===t?"start":"end";f({xStart:i,xEnd:a,y:p.top,sides:[s],center:!1}),f({x:p.left,yStart:l,yEnd:u,sides:[s],center:!1}),s="inside"===t?"end":"start",f({xStart:i,xEnd:a,y:p.bottom,sides:[s],center:!1}),f({x:p.right,yStart:l,yEnd:u,sides:[s],center:!1})});else{var v=[["x","y","xStart","xEnd","xStep","yStart","yEnd","yStep"].reduce(function(t,n){return e[n]&&(t[n]=At(e[n],"xStep"===n||"yStep"===n?0:d[u[n]],c[u[n]])),t},{})];["x","y"].forEach(function(t){var n=t+"Start",r=t+"End",o=t+"Step",i=t+"Gravity";v=v.reduce(function(a,l){var u=l[n],s=l[r],d=l[o];if(null!=u&&null!=s&&u>=s)return a;if(null!=d){if(d<2)return a;var c=d/2;c=e.gravity>c?c:null;for(var f=u;f<=s;f+=d){var p=Object.keys(l).reduce(function(t,e){return e!==n&&e!==r&&e!==o&&(t[e]=l[e]),t},{});p[t]=f,p[i]=c,a.push(p)}}else a.push(l);return a},[])}),v.forEach(function(t){f(t)})}return t},[]);t.snapTargets=s.length?s:null}var d={},c=t.options.autoScroll;if(c){d.isWindow=c.target===window,d.target=c.target;var f="scroll"===e,p=function(t,e,n){var r={},o=void 0,i=void 0,a=void 0;!function(t){r.clientWidth=t.clientWidth,r.clientHeight=t.clientHeight}(e?document.documentElement:t);var l=0,u=0;if(!n){var s=void 0,d=void 0;e?(s=kt(t,"x"),d=kt(t,"y"),o=getComputedStyle(document.documentElement,""),i=getComputedStyle(document.body,""),l=kt(t,"x",document.documentElement.scrollWidth+r.clientWidth+["marginLeft","marginRight","borderLeftWidth","borderRightWidth","paddingLeft","paddingRight"].reduce(function(t,e){return t+(parseFloat(o[e])||0)+(parseFloat(i[e])||0)},0)),u=kt(t,"y",document.documentElement.scrollHeight+r.clientHeight+["marginTop","marginBottom","borderTopWidth","borderBottomWidth","paddingTop","paddingBottom"].reduce(function(t,e){return 
t+(parseFloat(o[e])||0)+(parseFloat(i[e])||0)},0)),kt(t,"x",s),kt(t,"y",d)):(s=Pt(t,"x"),d=Pt(t,"y"),a=getComputedStyle(t,""),l=Pt(t,"x",t.scrollWidth+r.clientWidth+["marginLeft","marginRight","borderLeftWidth","borderRightWidth","paddingLeft","paddingRight"].reduce(function(t,e){return t+(parseFloat(a[e])||0)},0)),u=Pt(t,"y",t.scrollHeight+r.clientHeight+["marginTop","marginBottom","borderTopWidth","borderBottomWidth","paddingTop","paddingBottom"].reduce(function(t,e){return t+(parseFloat(a[e])||0)},0)),Pt(t,"x",s),Pt(t,"y",d))}r.scrollWidth=r.clientWidth+l,r.scrollHeight=r.clientHeight+u;var c=void 0;return e?r.clientX=r.clientY=0:(c=t.getBoundingClientRect(),a||(a=getComputedStyle(t,"")),r.clientX=c.left+(parseFloat(a.borderLeftWidth)||0),r.clientY=c.top+(parseFloat(a.borderTopWidth)||0)),r}(c.target,d.isWindow,f),v=Xt({left:p.clientX,top:p.clientY,width:p.clientWidth,height:p.clientHeight});f?t.autoScroll&&(d.scrollWidth=t.autoScroll.scrollWidth,d.scrollHeight=t.autoScroll.scrollHeight):(d.scrollWidth=p.scrollWidth,d.scrollHeight=p.scrollHeight),[["X","Width","left","right"],["Y","Height","top","bottom"]].forEach(function(t){var e=t[0],n=t[1],o=t[2],i=t[3],a=(d["scroll"+n]||0)-p["client"+n],l=c["min"+e]||0,u=tt(c["max"+e])?c["max"+e]:a;if(l<u&&l<a){u>a&&(u=a);for(var s=[],f=r[n.toLowerCase()],h=c.sensitivity.length-1;h>=0;h--){var m=c.sensitivity[h],g=c.speed[h];s.push({dir:-1,speed:g,position:v[o]+m}),s.push({dir:1,speed:g,position:v[i]-m-f})}d[e.toLowerCase()]={min:l,max:u,lines:s}}})}t.autoScroll=d.x||d.y?d:null}function Qt(t){wt.stop(),Mt(t.options.handle,t.orgCursor),lt.style.cursor=dt,!1!==t.options.zIndex&&(t.elementStyle.zIndex=t.orgZIndex),pt&&(lt.style[pt]=vt);var e=X(t.element);xt&&e.remove(xt),yt&&e.remove(yt),it=null,rt.cancel(),t.onDragEnd&&t.onDragEnd({left:t.elementBBox.left,top:t.elementBBox.top})}function te(t,e){var n=t.options,r=void 0;if(e.containment){var o=void 
0;Dt(e.containment)?e.containment!==n.containment&&(n.containment=e.containment,t.containmentIsBBox=!1,r=!0):(o=Ft(It(e.containment)))&&_t(o,n.containment)&&(n.containment=o,t.containmentIsBBox=!0,r=!0)}function i(t,e){function n(t){return"string"==typeof t?t.replace(/[, ]+/g," ").trim().toLowerCase():null}tt(e.gravity)&&e.gravity>0&&(t.gravity=e.gravity);var r=n(e.corner);if(r){if("all"!==r){var o={},i=r.split(/\s/).reduce(function(t,e){return(e="tl"===(e=e.trim().replace(/^(.).*?-(.).*$/,"$1$2"))||"lt"===e?"tl":"tr"===e||"rt"===e?"tr":"bl"===e||"lb"===e?"bl":"br"===e||"rb"===e?"br":null)&&!o[e]&&(t.push(e),o[e]=!0),t},[]),a=i.length;r=a?4===a?"all":i.join(" "):null}r&&(t.corner=r)}var l=n(e.side);l&&("start"===l||"end"===l||"both"===l?t.side=l:"start end"!==l&&"end start"!==l||(t.side="both")),"boolean"==typeof e.center&&(t.center=e.center);var u=n(e.edge);u&&("inside"===u||"outside"===u||"both"===u?t.edge=u:"inside outside"!==u&&"outside inside"!==u||(t.edge="both"));var s="string"==typeof e.base?e.base.trim().toLowerCase():null;return!s||"containment"!==s&&"document"!==s||(t.base=s),t}if(null!=e.snap){var a=Q(e.snap)&&null!=e.snap.targets?e.snap:{targets:e.snap},l=[],u=i({targets:l},a);u.gravity||(u.gravity=F),u.corner||(u.corner=W),u.side||(u.side=j),"boolean"!=typeof u.center&&(u.center=!1),u.edge||(u.edge=R),u.base||(u.base=M);var s=(Array.isArray(a.targets)?a.targets:[a.targets]).reduce(function(t,e){if(null==e)return t;var n=Dt(e),r=Ft(It(e)),o=n||r?{boundingBox:e}:Q(e)&&null==e.start&&null==e.end&&null==e.step?e:{x:e,y:e},a=[],s={},d=o.boundingBox,c=void 0;if(n||Dt(d))a.push({element:d}),s.boundingBox=d;else if(c=r||Ft(It(d)))a.push({ppBBox:c}),s.boundingBox=Wt(c);else{var f=void 0,p=["x","y"].reduce(function(t,e){var n,r=o[e];if(n=Yt(r))t[e]=n,s[e]=Lt(n);else{var i=void 0,a=void 0,l=void 
0;Q(r)&&(i=Yt(r.start),a=Yt(r.end),l=Yt(r.step),i&&a&&i.isRatio===a.isRatio&&i.value>=a.value&&(f=!0)),i=t[e+"Start"]=i||{value:0,isRatio:!1},a=t[e+"End"]=a||{value:1,isRatio:!0},s[e]={start:Lt(i),end:Lt(a)},l&&((l.isRatio?l.value>0:l.value>=2)?(t[e+"Step"]=l,s[e].step=Lt(l)):f=!0)}return t},{});if(f)return t;p.xStart&&!p.xStep&&p.yStart&&!p.yStep?a.push({xStart:p.xStart,xEnd:p.xEnd,y:p.yStart},{xStart:p.xStart,xEnd:p.xEnd,y:p.yEnd},{x:p.xStart,yStart:p.yStart,yEnd:p.yEnd},{x:p.xEnd,yStart:p.yStart,yEnd:p.yEnd}):a.push(p)}if(a.length){l.push(i(s,o));var v=s.corner||u.corner,h=s.side||u.side,m=s.edge||u.edge,g={gravity:s.gravity||u.gravity,base:s.base||u.base,center:"boolean"==typeof s.center?s.center:u.center,corners:"all"===v?z:v.split(" "),sides:"both"===h?N:[h],edges:"both"===m?V:[m]};a.forEach(function(e){["gravity","corners","sides","center","edges","base"].forEach(function(t){e[t]=g[t]}),t.push(e)})}return t},[]);s.length&&(n.snap=u,_t(s,t.parsedSnapTargets)&&(t.parsedSnapTargets=s,r=!0))}else e.hasOwnProperty("snap")&&t.parsedSnapTargets&&(n.snap=t.parsedSnapTargets=t.snapTargets=void 0);if(e.autoScroll){var d=Q(e.autoScroll)?e.autoScroll:{target:!0===e.autoScroll?window:e.autoScroll},c={};c.target=Dt(d.target)?d.target:window,c.speed=[],(Array.isArray(d.speed)?d.speed:[d.speed]).every(function(t,e){return!!(e<=2&&tt(t))&&(c.speed[e]=t,!0)}),c.speed.length||(c.speed=G);var f=Array.isArray(d.sensitivity)?d.sensitivity:[d.sensitivity];c.sensitivity=c.speed.map(function(t,e){return tt(f[e])?f[e]:q[e]}),["X","Y"].forEach(function(t){var e="min"+t,n="max"+t;tt(d[e])&&d[e]>=0&&(c[e]=d[e]),tt(d[n])&&d[n]>=0&&(!c[e]||d[n]>=c[e])&&(c[n]=d[n])}),_t(c,n.autoScroll)&&(n.autoScroll=c,r=!0)}else e.hasOwnProperty("autoScroll")&&(n.autoScroll&&(r=!0),n.autoScroll=void 0);if(r&&Kt(t),Dt(e.handle)&&e.handle!==n.handle){n.handle&&(n.handle.style.cursor=t.orgCursor,pt&&(n.handle.style[pt]=t.orgUserSelect),rt.removeStartHandler(n.handle,t.pointerEventHandlerId));var 
p=n.handle=e.handle;t.orgCursor=p.style.cursor,Mt(p,t.orgCursor),pt&&(t.orgUserSelect=p.style[pt],p.style[pt]="none"),rt.addStartHandler(p,t.pointerEventHandlerId)}(tt(e.zIndex)||!1===e.zIndex)&&(n.zIndex=e.zIndex,t===it&&(t.elementStyle.zIndex=!1===n.zIndex?t.orgZIndex:n.zIndex));var v={left:t.elementBBox.left,top:t.elementBBox.top},h=void 0;tt(e.left)&&e.left!==v.left&&(v.left=e.left,h=!0),tt(e.top)&&e.top!==v.top&&(v.top=e.top,h=!0),h&&Ut(t,v),["onDrag","onMove","onDragStart","onMoveStart","onDragEnd"].forEach(function(r){"function"==typeof e[r]?(n[r]=e[r],t[r]=n[r].bind(t.ins)):e.hasOwnProperty(r)&&null==e[r]&&(n[r]=t[r]=void 0)})}wt.move=function(t,e,n){St.call(window,Ht),Et(),Ct===t&&(e.x&&Bt.x&&(e.x.lastValue=Bt.x.lastValue),e.y&&Bt.y&&(e.y.lastValue=Bt.y.lastValue)),Ct=t,Bt=e,Ot=n;var r=Date.now();["x","y"].forEach(function(t){var e=Bt[t];e&&(e.lastFrameTime=r)}),Ht=bt.call(window,Tt)},wt.stop=function(){St.call(window,Ht),Et(),Bt={},Ct=null};var ee=function(){function t(e,n){!function(t,e){if(!(t instanceof e))throw new TypeError("Cannot call a class as a function")}(this,t);var r={ins:this,options:{zIndex:A},disabled:!1};if(Object.defineProperty(this,"_id",{value:++ot}),r._id=this._id,et[this._id]=r,!Dt(e)||e===lt)throw new Error("This element is not accepted.");if(n){if(!Q(n))throw new Error("Invalid options.")}else n={};var o=!0,i=void 0;if(e instanceof SVGElement&&(i=e.ownerSVGElement)){if(!e.getBBox)throw new Error("This element is not accepted. (SVGLocatable)");if(!e.transform)throw new Error("This element is not accepted. 
(SVGAnimatedTransformList)");r.svgTransform=e.transform.baseVal.appendItem(i.createSVGTransform()),r.svgPoint=i.createSVGPoint();var a=e.nearestViewportElement;r.svgCtmElement=$?a.appendChild(document.createElementNS(i.namespaceURI,"rect")):a,o=!1,r.initElm=Jt,r.moveElm=qt}else{var l=P.getName("willChange");l&&(o=!1),!n.leftTop&&ft?(l&&(e.style[l]="transform"),r.initElm=Zt,r.moveElm=Vt):(l&&(e.style[l]="left, top"),r.initElm=$t,r.moveElm=Gt)}if(r.element=function(t,e){var n=t.style;n.webkitTapHighlightColor="transparent";var r=P.getName("boxShadow"),o=window.getComputedStyle(t,"")[r];return o&&"none"!==o||(n[r]="0 0 1px transparent"),e&&ft&&(n[ft]="translateZ(0)"),t}(e,o),r.elementStyle=e.style,r.orgZIndex=r.elementStyle.zIndex,gt&&X(e).add(gt),r.pointerEventHandlerId=rt.regStartHandler(function(t){return function(t,e){return!(t.disabled||t.onDragStart&&!1===t.onDragStart(e)||(it&&Qt(it),zt(t.options.handle),lt.style.cursor=st||window.getComputedStyle(t.options.handle,"").cursor,!1!==t.options.zIndex&&(t.elementStyle.zIndex=t.options.zIndex),pt&&(lt.style[pt]="none"),yt&&X(t.element).add(yt),it=t,at=!1,nt.left=t.elementBBox.left-(e.clientX+window.pageXOffset),nt.top=t.elementBBox.top-(e.clientY+window.pageYOffset),0))}(r,t)}),!n.containment){var u;n.containment=(u=e.parentNode)&&Dt(u)?u:lt}n.handle||(n.handle=e),te(r,n)}return Y(t,[{key:"remove",value:function(){var t=et[this._id];this.disabled=!0,rt.unregStartHandler(rt.removeStartHandler(t.options.handle,t.pointerEventHandlerId)),delete et[this._id]}},{key:"setOptions",value:function(t){return Q(t)&&te(et[this._id],t),this}},{key:"position",value:function(){return Kt(et[this._id]),this}},{key:"disabled",get:function(){return et[this._id].disabled},set:function(t){var 
e=et[this._id];(t=!!t)!==e.disabled&&(e.disabled=t,e.disabled?(e===it&&Qt(e),e.options.handle.style.cursor=e.orgCursor,pt&&(e.options.handle.style[pt]=e.orgUserSelect),gt&&X(e.element).remove(gt)):(Mt(e.options.handle,e.orgCursor),pt&&(e.options.handle.style[pt]="none"),gt&&X(e.element).add(gt)))}},{key:"element",get:function(){return et[this._id].element}},{key:"rect",get:function(){return It(et[this._id].elementBBox)}},{key:"left",get:function(){return et[this._id].elementBBox.left},set:function(t){te(et[this._id],{left:t})}},{key:"top",get:function(){return et[this._id].elementBBox.top},set:function(t){te(et[this._id],{top:t})}},{key:"containment",get:function(){var t=et[this._id];return t.containmentIsBBox?Wt(t.options.containment):t.options.containment},set:function(t){te(et[this._id],{containment:t})}},{key:"snap",get:function(){return It(et[this._id].options.snap)},set:function(t){te(et[this._id],{snap:t})}},{key:"autoScroll",get:function(){return It(et[this._id].options.autoScroll)},set:function(t){te(et[this._id],{autoScroll:t})}},{key:"handle",get:function(){return et[this._id].options.handle},set:function(t){te(et[this._id],{handle:t})}},{key:"zIndex",get:function(){return et[this._id].options.zIndex},set:function(t){te(et[this._id],{zIndex:t})}},{key:"onDrag",get:function(){return et[this._id].options.onDrag},set:function(t){te(et[this._id],{onDrag:t})}},{key:"onMove",get:function(){return et[this._id].options.onMove},set:function(t){te(et[this._id],{onMove:t})}},{key:"onDragStart",get:function(){return et[this._id].options.onDragStart},set:function(t){te(et[this._id],{onDragStart:t})}},{key:"onMoveStart",get:function(){return et[this._id].options.onMoveStart},set:function(t){te(et[this._id],{onMoveStart:t})}},{key:"onDragEnd",get:function(){return et[this._id].options.onDragEnd},set:function(t){te(et[this._id],{onDragEnd:t})}}],[{key:"draggableCursor",get:function(){return 
ht},set:function(t){ht!==t&&(ht=t,ut=null,Object.keys(et).forEach(function(t){var e=et[t];e.disabled||e===it&&!1!==st||(Mt(e.options.handle,e.orgCursor),e===it&&(lt.style.cursor=dt,lt.style.cursor=window.getComputedStyle(e.options.handle,"").cursor))}))}},{key:"draggingCursor",get:function(){return mt},set:function(t){mt!==t&&(mt=t,st=null,it&&(zt(it.options.handle),!1===st&&(Mt(it.options.handle,it.orgCursor),lt.style.cursor=dt),lt.style.cursor=st||window.getComputedStyle(it.options.handle,"").cursor))}},{key:"draggableClass",get:function(){return gt},set:function(t){(t=t?t+"":void 0)!==gt&&(Object.keys(et).forEach(function(e){var n=et[e];if(!n.disabled){var r=X(n.element);gt&&r.remove(gt),t&&r.add(t)}}),gt=t)}},{key:"draggingClass",get:function(){return yt},set:function(t){if((t=t?t+"":void 0)!==yt){if(it){var e=X(it.element);yt&&e.remove(yt),t&&e.add(t)}yt=t}}},{key:"movingClass",get:function(){return xt},set:function(t){if((t=t?t+"":void 0)!==xt){if(it&&at){var e=X(it.element);xt&&e.remove(xt),t&&e.add(t)}xt=t}}}]),t}();rt.addMoveHandler(document,function(t){if(it){var e={left:t.clientX+window.pageXOffset+nt.left,top:t.clientY+window.pageYOffset+nt.top};if(Ut(it,e,it.snapTargets?function(t){var e=it.snapTargets.length,n=!1,r=!1,o=void 0;for(o=0;o<e&&(!n||!r);o++){var i=it.snapTargets[o];(null==i.gravityXStart||t.left>=i.gravityXStart)&&(null==i.gravityXEnd||t.left<=i.gravityXEnd)&&(null==i.gravityYStart||t.top>=i.gravityYStart)&&(null==i.gravityYEnd||t.top<=i.gravityYEnd)&&(n||null==i.x||(t.left=i.x,n=!0,o=-1),r||null==i.y||(t.top=i.y,r=!0,o=-1))}return t.snapped=n||r,!it.onDrag||it.onDrag(t)}:it.onDrag)){var n={},r=it.autoScroll;if(r){var o={x:it.elementBBox.left-window.pageXOffset,y:it.elementBBox.top-window.pageYOffset};["x","y"].forEach(function(t){if(r[t]){var 
e=r[t].min,i=r[t].max;r[t].lines.some(function(r){return(-1===r.dir?o[t]<=r.position:o[t]>=r.position)&&(n[t]={dir:r.dir,speed:r.speed/1e3,min:e,max:i},!0)})}})}n.x||n.y?(wt.move(r.target,n,r.isWindow?kt:Pt),e.autoScroll=!0):wt.stop(),at||(at=!0,xt&&X(it.element).add(xt),it.onMoveStart&&it.onMoveStart(e)),it.onMove&&it.onMove(e)}}});var ne=function(){it&&Qt(it)};rt.addEndHandler(document,ne),rt.addCancelHandler(document,ne);var re=function(){ct=P.getName("transitionProperty"),ft=P.getName("transform"),dt=lt.style.cursor,(pt=P.getName("userSelect"))&&(vt=lt.style[pt]);var t={},e=void 0;function n(t,e){t.initElm&&Kt(t,e)}var r=!1,o=c.add(function(o){r||(r=!0,it&&(n(it,o.type),rt.move(),t[it._id]=!0),clearTimeout(e),e=setTimeout(function(){!function(r){clearTimeout(e),Object.keys(et).forEach(function(e){t[e]||n(et[e],r)}),t={}}(o.type)},200),r=!1)});window.addEventListener("resize",o,!0),window.addEventListener("scroll",o,!0)};(lt=document.body)?re():document.addEventListener("DOMContentLoaded",function(){lt=document.body,re()},!0);e.default=ee}]).default; \ No newline at end of file
diff --git a/examples/program_analysis/ui/serve.py b/examples/program_analysis/ui/serve.py
new file mode 100644
index 0000000..faf7567
--- /dev/null
+++ b/examples/program_analysis/ui/serve.py
@@ -0,0 +1,58 @@
+"""Starts a server displaying an interactive version of a code analogy."""
+import os
+import json
+import http.server
+import socketserver
+from ui.lazy_structure_parser import parse_lazy_structure
+
+PORT = 8001
+
+# https://stackoverflow.com/questions/18444395
class RequestHandler(http.server.BaseHTTPRequestHandler):
    """Server handler for code triplet structures.

    Serves the parsed structure as JSON at /Structure and static UI assets
    (index.html, CSS) from the ui/ directory for all other single-segment
    paths. The structure itself is attached as a class attribute by
    start_server() before the server starts.
    """
    def do_GET(self):
        """Serves static content and parsed structures."""

        if self.path == "/Structure":
            self.send_good_headers("application/json")
            structure = parse_lazy_structure(self.structure)
            self.wfile.write(json.dumps(structure).encode())
        elif self.path.count("/") == 1:
            # Single-segment path: serve a static file from the ui directory.
            path = os.environ.get("BUILD_WORKSPACE_DIRECTORY", ".")
            path += f"/examples/program_analysis/ui{self.path}"
            if self.path == "/":
                path += "index.html"
            try:
                with open(path, "r") as disk_file:
                    data = disk_file.read().encode()
            except OSError:
                # File missing/unreadable: terminate the response properly
                # (the original left the 404 response without end_headers()).
                self.send_response(404)
                self.end_headers()
            else:
                if ".css" in self.path:
                    self.send_good_headers("text/css")
                else:
                    self.send_good_headers("text/html")
                # Reuse the bytes already read above instead of re-opening
                # the file, which previously leaked a file handle.
                self.wfile.write(data)
        else:
            self.send_response(404)
            self.end_headers()

    def send_good_headers(self, content_type):
        """Send a 200 along with the given content_type."""
        self.send_response(200)
        self.send_header("Access-Control-Allow-Origin", "*")
        self.send_header("Content-type", content_type)
        self.end_headers()
+
class ReuseAddrTCPServer(socketserver.TCPServer):
    """TCPServer variant that can rebind a port still in TIME_WAIT.

    Setting allow_reuse_address makes restarting the server on the same
    port work without "Address already in use" errors.
    https://stackoverflow.com/questions/15260558
    """

    allow_reuse_address = True
+
def start_server(structure):
    """Start serving an interactive version of the LazyStructure @structure.

    Blocks forever handling requests on PORT.
    """
    # Stash the structure on the handler class so do_GET can serve it at
    # the /Structure endpoint.
    RequestHandler.structure = structure
    with ReuseAddrTCPServer(("", PORT), RequestHandler) as server:
        print("Result available at http://localhost:{}".format(PORT))
        server.serve_forever()
diff --git a/examples/program_analysis/ui/style.css b/examples/program_analysis/ui/style.css
new file mode 100644
index 0000000..33ba1a4
--- /dev/null
+++ b/examples/program_analysis/ui/style.css
@@ -0,0 +1,35 @@
/* Styles for the interactive code-analogy viewer (served by serve.py). */

body {
    vertical-align: top;
    min-height: 1500px;
}

/* One code listing; the red border marks a plain (input) document. */
.document {
    white-space: pre;
    line-height: 1.5em;
    font-family: mono;
    font-size: 0.95em;
    position: relative;
    border: 3px solid red;
    display: inline-block;
    padding: 10px;
    margin-left: 5px;
    vertical-align: top;
}
    /* Chunk that belongs to the parsed structure. */
    .chunk-in-structure {
        border: 3px solid gray;
    }
    /* Chunk that participates in the analogy map. */
    .chunk-in-map {
        border: 3px solid green;
    }
    .highlight {
        background-color: green;
    }
/* Generated (output) documents use blue accents instead of red/green. */
.document.generated {
    border-color: blue;
}
    .document.generated .chunk-in-map {
        border-color: blue;
    }
    .document.generated .highlight {
        background-color: blue;
    }
diff --git a/examples/turing_machine/BUILD b/examples/turing_machine/BUILD
new file mode 100644
index 0000000..c8b90b4
--- /dev/null
+++ b/examples/turing_machine/BUILD
@@ -0,0 +1,19 @@
# Runnable Turing machine demo; see README.md in this directory.
py_binary(
    name = "turing_machine",
    srcs = ["turing_machine.py"],
    deps = [
        "//:ts_lib",
        "//:ts_utils",
        "//runtime",
    ],
)

# Pytest regression test checking the machine's state after one step.
py_test(
    name = "test_turing_machine",
    size = "small",
    srcs = ["test_turing_machine.py"],
    deps = [
        ":turing_machine",
        "@bazel_python//:pytest_helper",
    ],
)
diff --git a/examples/turing_machine/README.md b/examples/turing_machine/README.md
new file mode 100644
index 0000000..8155871
--- /dev/null
+++ b/examples/turing_machine/README.md
@@ -0,0 +1,14 @@
+# Turing Machine Example
+This directory contains code for simulating a Turing machine with `TSLang`. Its
+primary goal is to demonstrate the use and power of our rewrite rules.
+You can run the example from the root of the Sifter repository like so:
+```bash
+bazel run examples/turing_machine:turing_machine
+```
+It will print the proposed delta, corresponding to one step of the machine's
+execution.
+
+#### Files
+* `turing_machine.py` contains code defining the TM as a triplet structure.
+* `test_turing_machine.py` is a Pytest test which ensures that
+ `turing_machine.py` returns the correct result.
diff --git a/examples/turing_machine/test_turing_machine.py b/examples/turing_machine/test_turing_machine.py
new file mode 100644
index 0000000..bf2f9d2
--- /dev/null
+++ b/examples/turing_machine/test_turing_machine.py
@@ -0,0 +1,14 @@
+"""Integration test using turing_machine.py"""
+# pylint: disable=pointless-statement,import-error
+from external.bazel_python.pytest_helper import main
+import turing_machine
+
+def test_turing_machine():
+ """Regression test for the Turing machine example."""
+ ts = turing_machine.Main()
+ state, tape, index = turing_machine.PrintTMState(ts)
+ assert state == "/:State:B"
+ assert tape == ["/:Symbol:1", "X"]
+ assert index == 1
+
+main(__name__, __file__)
diff --git a/examples/turing_machine/turing_machine.py b/examples/turing_machine/turing_machine.py
new file mode 100644
index 0000000..3040448
--- /dev/null
+++ b/examples/turing_machine/turing_machine.py
@@ -0,0 +1,156 @@
+"""Example program implementing a Turing machine in TSLang.
+
+Note that we use a few special names, namely "tc" representing the Triplet
+Structure being operated on and "rt" representing the Runtime operating on that
+structure.
+"""
+from ts_lib import TripletStructure
+from ts_utils import RegisterRule
+from runtime.runtime import TSRuntime
+
def Main():
    """Builds a basic TM with one transition rule and runs it for one step.

    In effect, we're building up a 'graph' where nodes represent a few possible
    things:
    1. 'Prototypical types' representing concepts such as "the A state" or "the
       leftmost symbol."
    2. Transition rules, which are themselves composed of nodes that are mapped
       into both the prototypical types declared here and those of the
       'underlying type checker,' such as /RULE and /IMPLICANT.
    3. Nodes representing the current state and the machine tape.

    Returns the TripletStructure after exactly one transition step has been
    applied. The TM state is pretty-printed before and after the step.
    """
    ts = TripletStructure()
    # Initialize prototypes for the two states. Note that this is not strictly
    # necessary, as nodes will be created implicitly when first referenced, but
    # it's nice to explicitly declare "standard" nodes in one place. Also note
    # that the ts.scope(...) blocks ensure these nodes are named eg.
    # ts["/:State:A"], not just ts["/:A"].
    # pylint: disable=pointless-statement
    with ts.scope(":State"):
        ts[":A, :B"]
    # Initialize prototypes for the types of symbols on the tape.
    with ts.scope(":Symbol"):
        ts[":0, :1, :2"]
    # Initialize a prototype representing the current tape symbol.
    ts[":Mark"]
    # Initialize prototypes for, effectively, the "next to relation." One of
    # the prototype nodes is "the one on the left" and the other is "the one on
    # the right."
    with ts.scope(":NextPair"):
        ts[":Left, :Right"]
    # Add a transition rule: in state A reading a 2, write a 1, move right,
    # and switch to state B.
    TransitionRule(ts,
                   name=":Transition0A",
                   state=ts[":State:A"],
                   read_symbol=ts[":Symbol:2"],
                   write_symbol=ts[":Symbol:1"],
                   direction="R",
                   statep=ts[":State:B"])
    # Add a node corresponding to the current state and map it as an instance
    # of state A.
    ts[":MState"].map({ts[":CurrentState"]: ts[":State:A"]})
    # Add the initial symbol to the tape, initialized to 2.
    ts[":MSymbolType"].map({ts[":OriginSymbol"]: ts[":Symbol:2"]})
    # Mark the current symbol.
    ts[":MSymbolMark"].map({ts[":OriginSymbol"]: ts[":Mark"]})
    # The TSRuntime will parse and apply the rules to the structure.
    rt = TSRuntime(ts)
    # Print the state of the TM.
    PrintTMState(ts)
    # Execute one step. Exactly one rule application should be possible here.
    proposals = list(rt.propose_all())
    assert len(proposals) == 1
    print("Taking one step...")
    _, delta = proposals[0]
    delta.apply()
    PrintTMState(ts)
    return ts
+
def TransitionRule(
        ts, name, state, read_symbol, write_symbol, direction, statep):
    """Adds a transition rule to the structure.

    The rule reads: if the machine is in @state and the marked (current)
    cell holds @read_symbol, then rewrite the cell to @write_symbol, switch
    to state @statep, and move the mark one cell in @direction ("L", "R",
    or anything else to stay in place).
    """
    print(f"Adding Transition Rule {name}:\n" +
          f"\tFrom State: {state}, Read Symbol: {read_symbol}\n" +
          f"\tTo State: {statep}, Write Symbol: {write_symbol}\n" +
          f"\tMove: {direction}")
    with ts.scope(name):
        # We must have the current state and the current symbol. We will
        # overwrite this information when the rule is applied.
        with ts.scope(":MustMap:Subtract"):
            ts[":MState"].map({ts[":State"]: state})
            ts[":MSymbol"].map({ts[":Symbol"]: read_symbol})
            ts[":MMarker"].map({ts[":Symbol"]: ts["/:Mark"]})

        # If it exists, we should map against the next symbol we want. If it
        # does not exist, we should insert it.
        with ts.scope(":TryMap:OrInsert"):
            if direction == "L":
                # Moving left: the destination cell sits on the Left side of
                # the NextPair relation, the current cell on the Right.
                ts[":MNewSymbol"].map({
                    ts[":NewSymbol"]: ts["/:NextPair:Left"],
                    ts[":Symbol"]: ts["/:NextPair:Right"],
                })
                new_symbol = ts[":NewSymbol"]
            elif direction == "R":
                # Moving right: mirror image of the "L" case above.
                ts[":MNewSymbol"].map({
                    ts[":Symbol"]: ts["/:NextPair:Left"],
                    ts[":NewSymbol"]: ts["/:NextPair:Right"],
                })
                new_symbol = ts[":NewSymbol"]
            else:
                # No movement: the mark stays on the current cell.
                new_symbol = ts[":Symbol"]

        # After the step: new state, rewritten symbol, mark on the new cell.
        with ts.scope(":Insert"):
            ts[":MState"].map({ts[":State"]: statep})
            ts[":MSymbol"].map({ts[":Symbol"]: write_symbol})
            ts[":MMarker"].map({new_symbol: ts["/:Mark"]})

    RegisterRule(ts, auto_assert_equal=True)
+
def PrintTMState(ts):
    """Pretty-print the current state of the TM.

    Returns a (state, tape, head_index) tuple, where @tape is a list of
    symbol-type node names ("X" for cells without a mapped symbol type) and
    @head_index is the position of the marked cell within @tape.
    """
    print("Printing Current Turing Machine:")
    # Exactly one node should be mapped as the current state.
    assert len(ts.lookup(None, "/:CurrentState", None)) == 1
    current_state = ts.lookup(None, "/:CurrentState", None)[0][2]
    print("\tCurrent state:", current_state)

    # The uniquely-marked node is the tape cell under the head.
    assert len(ts.lookup(None, None, "/:Mark")) == 1
    head_node = ts.lookup(None, None, "/:Mark")[0][1]

    # Walk outward from the origin cell in both directions, following the
    # NextPair relation, until the tape ends on both sides.
    symbols = ["/:OriginSymbol"]
    while True:
        left = GetNodeNeighbor(ts, symbols[0], "/:NextPair:Left")
        if left:
            symbols.insert(0, left)
        right = GetNodeNeighbor(ts, symbols[-1], "/:NextPair:Right")
        if right:
            symbols.append(right)
        if not (left or right):
            break
    head_index = symbols.index(head_node)
    # Translate cell nodes to their symbol types; "X" marks untyped cells.
    symbols = [ts.lookup("/:MSymbolType", node, None)[0][2]
               if ts.lookup("/:MSymbolType", node, None) else "X"
               for node in symbols]
    print("\tCurrent Tape Contents:", symbols)
    print("\tCurrent Head Index:", head_index)
    return (current_state, symbols, head_index)
+
def GetNodeNeighbor(ts, node, side):
    """Returns node on the @side side of @node in @ts.

    Returns None when @node has no neighbor on that side.
    """
    # A NextPair fact records @node in the opposite slot; find that fact
    # first, then read the node occupying the requested slot.
    flip = {
        "/:NextPair:Right": "/:NextPair:Left",
        "/:NextPair:Left": "/:NextPair:Right",
    }
    try:
        pair_fact = ts.lookup(None, node, flip[side])[0][0]
        return ts.lookup(pair_fact, None, side)[0][1]
    except IndexError:
        return None
+
+if __name__ == "__main__":
+ Main()
diff --git a/mapper.py b/mapper.py
new file mode 100644
index 0000000..291d79a
--- /dev/null
+++ b/mapper.py
@@ -0,0 +1,121 @@
+"""Rules to construct mappings between parts of a structure.
+
+'Mappings', aka 'abstractions' or 'joins', are new structures which contain the
+shared facts of two or more other sub-structures.
+
+TODO(masotoud): Right now we build mappings up fact-by-fact. Instead, once we
+build an initial mapping, we can find new 'instances' by turning that mapping
+into a production rule.
+"""
+from ts_utils import RegisterRule, RegisterPrototype
+
def MapperCodelet(ts, name=":Mapper"):
    """Registers the Mapper codelet's rules on @ts and returns its scope."""
    with ts.scope(name) as codelet_scope:
        NoSlip(ts, codelet_scope)
        HelperRules(ts, codelet_scope)
    return codelet_scope
+
def NoSlip(ts, scope):
    """Prototypes extending a mapping without 'real slips.'

    Basically, these rules handle mappings where the relations line up, such as
    abc -> bcd, lmn -> mno and more complicated ones like abcefg -> fgh, xyzabc
    -> bcd.

    It *cannot* handle 'deeper' mappings, where the important relations
    actually change. Eg. abc -> bcd, nml -> mlk (here, successor is 'really
    slipping' with predecessor). Such mappings could be handled with a similar
    rule, but they're a lot harder to deal with because then the mapping chosen
    is more ambiguous.
    """
    with ts.scope(":NoSlipRules"):
        # Pattern skeleton: concrete nodes :A and :B are each related (via
        # map-facts :MA/:MB) to a shared node :C.
        ts[":MA"].map({ts[":A"]: ts[":C"]})
        ts[":MB"].map({ts[":B"]: ts[":C"]})

        # :MAlphaA / :MAlphaB are maps into an abstraction node.
        ts[":IsAbstractionA"].map({ts[":MAlphaA"]: scope[":Abstraction"]})
        ts[":IsAbstractionB"].map({ts[":MAlphaB"]: scope[":Abstraction"]})

        # The concrete nodes' images under the abstraction (:AlphaAB), ...
        ts[":MAlphaA"].map({ts[":A"]: ts[":AlphaAB"]})
        ts[":MAlphaB1"].map({ts[":B"]: ts[":AlphaAB"]})

        # ... the map-facts' images (:AlphaMAB), ...
        ts[":MAlphaA"].map({ts[":MA"]: ts[":AlphaMAB"]})
        ts[":MAlphaB2"].map({ts[":MB1"]: ts[":AlphaMAB"]})

        # ... and the abstract counterpart of the shared fact.
        ts[":NewAlphaMAB"].map({ts[":AlphaAB"]: ts[":C"]})

        # Each prototype below chooses which of the nodes above must match
        # existing structure (/MUST_MAP, /TRY_MAP) and which get created
        # (/INSERT) when the rule fires.
        RegisterPrototype(ts, dict({
            ":HotBegin": {ts["/MUST_MAP"]: ts[":A, :B, :C, :MA, :MB"]},
            ":NewConcrete": {ts["/INSERT"]: ts[":AlphaAB, :NewAlphaMAB"]},
            ":NewMap": {ts["/INSERT"]: ts[":AlphaMAB, :NewAlphaMAB"]},
            ":NewFact": {ts["/INSERT"]: [ts[":NewAlphaMAB"]]},
            ":IntoExisting": {
                ts["/INSERT"]:
                    ts[":IsAbstractionB, :MAlphaB, :MAlphaB1, :MAlphaB2"],
            },
            ":ExistingConcrete": {ts["/INSERT"]: [ts[":MAlphaB1"]]},
            ":ExistingMap": {ts["/INSERT"]: [ts[":MAlphaB2"]]},
            ":Concretize?Concrete": {
                ts["/INSERT"]: [ts[":B"], ts[":MAlphaB1"]],
            },
            ":Concretize?Map": {
                ts["/TRY_MAP"]: [ts[":MB"]],
                ts["/INSERT"]: [ts[":MB"], ts[":MB1"], ts[":MAlphaB2"]],
            },
            ":ConcretizeMap": {
                ts["/INSERT"]: [ts[":MB"], ts[":MB1"], ts[":MAlphaB2"]],
            },
            ":Concretize?Fact": {
                ts["/INSERT"]: [ts[":MB"]],
            },
        }), equal=[
            # Node sets that must resolve to the same structure node.
            set({ts[":NewAlphaMAB"], ts[":AlphaMAB"]}),
            set({ts[":MAlphaB1"], ts[":MAlphaB2"], ts[":MAlphaB"]}),
            set({ts[":MB"], ts[":MB1"]}),
        ], maybe_equal=[
            # Node sets that are allowed (but not required) to coincide.
            set({ts[":A"], ts[":B"]}),
            set({ts[":MA"], ts[":MB"]}),
        ])
+
def HelperRules(ts, scope):
    """Helper patterns used by the analogy-making heuristics.

    See analogy_utils.py for more details on where these patterns are used.
    """
    # Matches an abstraction map sending the same node :A to two abstract
    # nodes, i.e. the map fails to be a function.
    with ts.scope(":NotFunction"):
        with ts.scope(":MustMap"):
            ts[":IsAbstractionA"].map({ts[":MAlphaA"]: scope[":Abstraction"]})
            ts[":MAlphaA"].map({ts[":A"]: ts[":AlphaA1"]})
            ts[":MAlphaA"].map({ts[":A"]: ts[":AlphaA2"]})
        RegisterRule(ts)

    # Matches an abstraction map sending two nodes to the same abstract
    # node, i.e. the map fails to be injective.
    with ts.scope(":NotInjective"):
        with ts.scope(":MustMap"):
            ts[":IsAbstractionA"].map({ts[":MAlphaA"]: scope[":Abstraction"]})
            ts[":MAlphaA"].map({
                ts[":A1"]: ts[":AlphaA"],
                ts[":A2"]: ts[":AlphaA"],
            })
        RegisterRule(ts)

    # Matches a concrete fact whose abstract counterpart is absent from the
    # abstraction (the fact was never lifted).
    with ts.scope(":MissingFacts"):
        with ts.scope(":MustMap"):
            ts[":MA"].map({ts[":A"]: ts[":C"]})
            ts[":IsAbstractionA"].map({ts[":MAlphaA"]: scope[":Abstraction"]})
            # TODO: maybe this should be in NoMap?
            ts[":MAlphaA"].map({ts[":A"]: ts[":AlphaAB"]})
            ts[":MAlphaA"].map({ts[":MA"]: ts[":AlphaMAB"]})
        with ts.scope(":NoMap"):
            ts[":AlphaMAB"].map({ts[":AlphaAB"]: ts[":C"]})
        RegisterRule(ts, auto_assert_equal=True)

    # The reverse case: an abstract fact with no concrete counterpart, so it
    # cannot be lowered back into the concrete structure.
    with ts.scope(":UnlowerableFacts"):
        with ts.scope(":MustMap"):
            ts[":AlphaMAB"].map({ts[":AlphaAB"]: ts[":C"]})
            ts[":IsAbstractionA"].map({ts[":MAlphaA"]: scope[":Abstraction"]})
            ts[":MAlphaA"].map({ts[":A"]: ts[":AlphaAB"]})
            ts[":MAlphaA"].map({ts[":MA"]: ts[":AlphaMAB"]})
        with ts.scope(":NoMap"):
            ts[":MA"].map({ts[":A"]: ts[":C"]})
        RegisterRule(ts, auto_assert_equal=True)
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..efc6a39
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,3 @@
+coverage==5.3
+pytest==6.0.2
+pybind11==2.5.0
diff --git a/runtime/BUILD b/runtime/BUILD
new file mode 100644
index 0000000..ed2610b
--- /dev/null
+++ b/runtime/BUILD
@@ -0,0 +1,74 @@
# Bazel targets for the triplet-structure runtime.
# See README.md in this directory for a description of each module.

package(default_visibility = ["//:__subpackages__"])

# Main runtime entry point; ties the sub-libraries together.
py_library(
    name = "runtime",
    srcs = ["runtime.py"],
    deps = [
        ":cpp_structure",
        ":interactive",
        ":matcher",
        ":production_rule",
    ],
)

# Python wrappers around the C++ structure/solver (ts_cpp).
py_library(
    name = "cpp_structure",
    srcs = ["cpp_structure.py"],
    deps = [
        ":utils",
    ],
)

py_library(
    name = "production_rule",
    srcs = ["production_rule.py"],
    deps = [
        ":assignment",
        ":pattern",
        ":utils",
    ],
)

# Interactive REPL plus its record/replay input helper.
py_library(
    name = "interactive",
    srcs = ["interactive.py"],
    deps = [
        ":shadow_input",
    ],
)

py_library(
    name = "shadow_input",
    srcs = ["shadow_input.py"],
    deps = [],
)

py_library(
    name = "utils",
    srcs = ["utils.py"],
    deps = [],
)

py_library(
    name = "assignment",
    srcs = ["assignment.py"],
    deps = [
        ":utils",
    ],
)

py_library(
    name = "pattern",
    srcs = ["pattern.py"],
    deps = [
        ":utils",
    ],
)

py_library(
    name = "matcher",
    srcs = ["matcher.py"],
    deps = [
        ":utils",
    ],
)
diff --git a/runtime/README.md b/runtime/README.md
new file mode 100644
index 0000000..e080a99
--- /dev/null
+++ b/runtime/README.md
@@ -0,0 +1,33 @@
+# Triplet Structure Runtime
+
+This folder contains a triplet structure runtime implementation. The goal of
+the runtime is to take a Triplet Structure (see ``../ts_lib.py``), parse the
+"operational" nodes (eg. rules and tactics), then apply the rules so that some
+goal is achieved (eg. getting rid of TOP nodes).
+
+At a high level:
+- `runtime.py` operates as the main data structure which keeps track of the
+ information needed and produced during the execution.
+- `solver.py` has methods that allow for existential queries on the
+ corresponding Structure. For example, you might look for nodes 1, 2 such that
+ facts (1, 2, "/:Mapper:TOP") and (2, 1, 2) both hold in the structure.
+- `pattern.py` is essentially a higher-level interface to the Solver. Each
+ `Pattern` instance corresponds to a single pattern/existential query.
+- `production_rule.py` can be thought of as an _even higher_ level interface to
+ the `Patterns`. Each `ProductionRule` corresponds to one `/RULE` node in the
+ Structure. ProductionRules can have relatively complicated mapping
+ strategies, eg. "find an assignment satisfying ... but not ..."
+- `assignment.py` describes a satisfying assignment to a `ProductionRule`.
+ `Assignment`s are an intermediate step between matching a pattern and then
+ constructing the actual delta corresponding to the match/rule.
+- `delta.py` describes `TSDeltas`, which describe a modification (add/remove
+ nodes/facts) to the structure. They are produced by `Assignments` which are
+ produced by `ProductionRules`.
+- `tactic.py` describes `Tactic`s, which are essentially heuristics that
+ control the application of rules (and other tactics).
+- `interactive.py` is an interactive REPL for manually/semi-automatically
+ applying rules, tactics, etc. to the structure.
+- `shadow_input.py` is used by `interactive.py` to allow the user to record,
+ save, and replay their commands.
+- `utils.py` contains helper methods for the rest of the runtime. Most useful
+ is the Translator, which cleans up some common operations with dictionaries.
diff --git a/runtime/assignment.py b/runtime/assignment.py
new file mode 100644
index 0000000..76d5b65
--- /dev/null
+++ b/runtime/assignment.py
@@ -0,0 +1,145 @@
+"""Methods handling satisfying rule assignments.
+"""
+# pylint: disable=import-error,no-name-in-module
+import runtime.utils as utils
+
class Assignment:
    """Represents a single satisfying assignment to a /RULE in the Structure.

    This class is effectively an intermediary between ProductionRule and
    TSDelta.
    """
    def __init__(self, rule, assignment):
        """Initialize the Assignment.

        @rule is the matched ProductionRule; @assignment maps rule nodes to
        structure nodes. The dict is copied so later caller mutation cannot
        change this record, and base_hash fingerprints it so inserted nodes
        get deterministic names (see node_name).
        """
        self.ts = rule.ts
        self.rule = rule
        self.assignment = assignment.copy()
        self.base_hash = utils.real_hash(assignment)

    def apply(self):
        """Applies the rule + assignment to the structure and returns the map.

        NOTE: Does *NOT* wrap the delta. The caller should do this if they want
        to.
        """
        # We will update this dictionary with newly-created nodes as necessary.
        running_assignment = self.assignment.copy()

        # Order matters: create /INSERT nodes, then add facts, then remove.
        self.add_nodes(running_assignment)
        added_facts = self.add_facts(running_assignment)
        self.remove(running_assignment, added_facts)

        return running_assignment

    def add_nodes(self, running_assignment):
        """Adds /INSERT nodes and updates @running_assignment.
        """
        for node in self.unassigned_of_type(running_assignment, "/INSERT"):
            # NOTE: This also adds the node to the structure.
            new_node = self.ts[self.node_name(node) + ":??"].full_name
            running_assignment[node] = new_node
            # Rule nodes declared equal all share the one new node.
            for equivalent_node in self.rule.equal[node]:
                running_assignment[equivalent_node] = new_node

    def add_facts(self, running_assignment):
        """Adds facts to the structure.

        The add_facts are rule facts where all rule-nodes are assigned and not
        removed (/subtracted?).

        Returns the set of (translated) facts that were added.
        """
        translator = utils.Translator(running_assignment)
        ignore_nodes = set(self.rule.nodes_by_type["/REMOVE"])
        must_include = set(self.rule.nodes_by_type["/INSERT"])
        relevant_nodes = set(running_assignment.keys()) - ignore_nodes

        new_facts = []
        # Keep facts that (a) mention at least one inserted node and (b) only
        # use rule nodes that are assigned and not ignored.
        for fact in self.facts_of_nodes(sorted(relevant_nodes)):
            args = set(fact)
            if ((args & must_include) and
                    (set(fact) & self.rule.all_nodes) <= relevant_nodes):
                new_facts.append(translator.translate_tuple(fact))

        self.ts.add_facts(new_facts)
        return set(new_facts)

    def remove(self, running_assignment, added_facts):
        """Removes the relevant nodes & facts.

        We remove nodes that are marked /REMOVE or ones that are marked
        /SUBTRACT which have no facts after subtraction.
        """
        # (1) First, we just remove the /REMOVE nodes and any facts referencing
        # them.
        for node in self.assigned_of_type(running_assignment, "/REMOVE"):
            node = running_assignment[node]
            self.ts[node].remove_with_facts()

        # (2) Then we remove /SUBTRACT facts.
        # NOTE: Currently, addition takes precedence over removal. So if you
        # '/INSERT' a fact that already exists and '/REMOVE' or '/SUBTRACT' it
        # at the same time, it will end up *still in the structure*. The
        # example for where this semantics is useful is for something like the
        # Turing Machine example, where you might want to express keeping the
        # same head position as 'remove the current head position then put it
        # back in the same spot.'
        # NOTE(review): @fact below is an *untranslated* rule fact while
        # @added_facts holds *translated* tuples (see add_facts), so the
        # "addition wins" check may never match for non-trivial assignments —
        # confirm whether the comparison should translate first.
        translator = utils.Translator(running_assignment)
        subtract = set(self.assigned_of_type(running_assignment, "/SUBTRACT"))
        for fact in self.assigned_rule_facts(running_assignment):
            if set(fact) & subtract and fact not in added_facts:
                self.ts.remove_fact(translator.translate_tuple(fact))

        # (3) Then, we remove /SUBTRACT nodes which have no facts.
        for node in self.assigned_of_type(running_assignment, "/SUBTRACT"):
            node = running_assignment[node]
            if not self.ts.facts_about_node(node, True):
                self.ts[node].remove()

    def node_name(self, node):
        """Returns the name to use for produced node @node.

        Ensures node names are deterministic and reproducible, regardless of
        when exactly the match happened.
        """
        return "/:" + utils.real_hash("{}{}".format(self.base_hash, node))

    def assigned_rule_facts(self, running_assignment):
        """Returns rule facts which do not involve remaining unassigned nodes.
        """
        assigned_rule_nodes = set(running_assignment.keys())
        for fact in self.rule.facts:
            fact_rule_nodes = set(fact) & self.rule.all_nodes
            if fact_rule_nodes <= assigned_rule_nodes:
                yield fact

    def assigned_of_type(self, running_assignment, of_type):
        """Returns nodes already assigned to of a particular type.
        """
        for node in self.rule.nodes_by_type[of_type]:
            if node in running_assignment:
                yield node

    def unassigned_of_type(self, running_assignment, of_type):
        """Returns nodes not yet assigned which are @of_type in self.rule.

        Eg., you could use this to return all nodes of_type=/INSERT which are
        not already constructed (i.e., in running_assignment).
        """
        for node in self.rule.nodes_by_type[of_type]:
            if node not in running_assignment:
                yield node

    def facts_of_nodes(self, nodes):
        """Helper method to return facts with one of @nodes in the first slot.

        NOTE: This uses the cached copy from @self.rule.indexed_facts, meaning
        these are such facts which were in the structure when @rule was
        initialized. We do this so we can remove rule-related nodes from the
        structure after initialization.
        """
        assert set(nodes) <= set(self.rule.all_nodes)
        return (fact for node in nodes
                for fact in self.rule.indexed_facts[node])

    def __str__(self):
        return str(self.assignment)
diff --git a/runtime/cpp_structure.py b/runtime/cpp_structure.py
new file mode 100644
index 0000000..a97b80b
--- /dev/null
+++ b/runtime/cpp_structure.py
@@ -0,0 +1,153 @@
+"""Python wrappers for the C++ solver."""
+from collections import defaultdict
+# pylint: disable=no-name-in-module
+from ts_cpp import Structure, Triplet, Solver
+import runtime.utils as utils
+
class CPPStructure:
    """Represents an optimized TripletStructure.

    Notably, the optimized TripletStructure is implemented in C++ and nodes are
    referenced by numerical indices, not strings.
    """
    def __init__(self, ts):
        """Initialize the CPPStructure."""
        self.ts = ts
        self.cpp = Structure()
        # Node names <-> numerical ids. Ids start at 1; slot 0 of
        # dictionary_back is a placeholder so back-translation is a plain
        # list index.
        self.dictionary = dict({node: (i+1) for i, node in enumerate(ts.nodes)})
        self.dictionary_back = [None] + ts.nodes

        self.translator = utils.Translator(self.dictionary)
        # Mirror every existing fact into the C++ structure.
        existing = self.translator.translate_tuples(
            ts.lookup(None, None, None, read_direct=True))
        for fact in existing:
            self.cpp.addFact(*fact)

        # Register as the structure's shadow — presumably TripletStructure
        # forwards subsequent add/remove calls here; confirm in ts_lib.
        ts.shadow = self

    def solve(self, pattern):
        """Given a CPPPattern, yields solutions to it in the structure.

        Each solution maps the pattern's original variables to node names.
        """
        if not pattern.valid:
            return
        if not pattern.sorted_variables:
            # Fully-ground pattern: just verify every fact is present.
            if all([self.ts.lookup(*fact, read_direct=True)
                    for fact in pattern.raw_constraints]):
                yield {}
            return

        solver = Solver(self.cpp, len(pattern.sorted_variables),
                        pattern.constraints, pattern.maybe_equal)

        while solver.isValid():
            assignment = solver.nextAssignment()
            if assignment:
                # Need to convert back to a dict with the original ordering.
                real_assignment = dict()
                for i, variable in enumerate(pattern.sorted_variables):
                    node = self.dictionary_back[assignment[i]]
                    real_assignment[variable] = node
                yield real_assignment
            else:
                return

    def assignments(self, constraints, maybe_equal=None):
        """Yields assignments to the constraints."""
        pattern = CPPPattern(self, constraints, maybe_equal)
        yield from self.solve(pattern)

    def add_node(self, node):
        """Add a node to the structure.

        Only assigns the node a numerical id (see remove_node's docstring).
        """
        if node not in self.dictionary:
            self.dictionary[node] = len(self.dictionary) + 1
            self.dictionary_back.append(node)

    def remove_node(self, node):
        """No-op.

        Unconstrained nodes in patterns are not supported, hence for
        pattern-solving purposes a node is considered to be in the CPPStructure
        iff there are facts using it. the 'add_node' method above only assigns
        the node a numerical ID.
        """

    def add_fact(self, fact):
        """Add a fact to the structure."""
        self.cpp.addFact(*self.translator.translate_tuple(fact))

    def remove_fact(self, fact):
        """Remove a fact from the structure."""
        self.cpp.removeFact(*self.translator.translate_tuple(fact))
+
class CPPPattern:
    """Represents a pre-processed existential search query.

    For example, we might search for [(1, 3, 3), (1, 1, "/:A")] where 1 and 3
    are allowed to be equal. The C++ solver enforces a number of constraints
    that are not assumed on the Python side:
    1. Constants like "/:A" need to be replaced by their corresponding
       (positive) number in @cppstruct.dictionary.
    2. Variables need to be numbered in decreasing order starting from 0 ---
       no positive variable numbers and no gaps.
    3. Variables should be ordered in the order that they should be searched
       for in the structure (i.e., ordering heuristics must be computed on the
       Python side).
    For example, the [(1,3,3),(1,1,"/:A")] pattern might get pre-processed to
    the pattern [(-1,0,0),(-1,-1,1)], where -1<->1, 0<->3, and 1<->"/:A".
    """
    # Class-level cache of pre-processed patterns, keyed by the constraint
    # tuple. NOTE(review): entries are not keyed by @cppstruct, so the cached
    # node-id translations could be wrong if patterns are shared across
    # different structures — confirm reuse is per-structure.
    cached = dict()
    def __init__(self, cppstruct, constraints, maybe_equal):
        """Initialize and pre-process the pattern."""
        frozen = tuple(constraints)
        if (frozen in CPPPattern.cached
                and CPPPattern.cached[frozen][0] == maybe_equal):
            # Cache hit: copy the pre-processed fields wholesale.
            cached = CPPPattern.cached[frozen][1]
            self.raw_constraints = cached.raw_constraints
            self.constraints = cached.constraints
            self.valid = cached.valid
            self.sorted_variables = cached.sorted_variables
            self.maybe_equal = cached.maybe_equal
            return
        assert constraints
        self.raw_constraints = constraints
        # First, sort the variables.
        sorted_variables = []
        # Variables are ints; constants are node-name strings.
        variables = sorted(set(i for fact in constraints for i in fact if isinstance(i, int)))
        # n_fixed[i] = number of already-determined slots in constraint i.
        n_fixed = [sum(isinstance(arg, str) for arg in constraint)
                   for constraint in constraints]
        # NOTE(review): despite the parameter name, this key function is
        # applied to *constraint indices* — see the max() over
        # range(len(constraints)) below.
        def _compare_goodness_key(variable):
            return (n_fixed[variable] != 3,
                    n_fixed[variable],
                    str(constraints[variable]).count(":"))
        for _ in range(len(variables)):
            # Greedily take the most-constrained (but not fully fixed)
            # constraint, then order one of its remaining variables next.
            best_constraint = max(range(len(constraints)),
                                  key=_compare_goodness_key)
            arg = next(arg for arg in constraints[best_constraint]
                       if (not isinstance(arg, str) and
                           arg not in sorted_variables))
            sorted_variables.append(arg)
            for i, constraint in enumerate(constraints):
                if arg in constraint:
                    n_fixed[i] += 1
        # Then, rewrite the constraints with the sorted variables.
        # Variables become 0, -1, -2, ...; constants become positive ids.
        translation = dict({old_var: -i
                            for i, old_var in enumerate(sorted_variables)})
        try:
            self.constraints = [
                Triplet(*[translation[arg] if arg in translation
                          else cppstruct.dictionary[arg]
                          for arg in constraint])
                for constraint in constraints]
        except KeyError:
            # E.g. the pattern uses a node that's not in the structure.
            self.valid = False
            return
        self.valid = True
        # Keep for the back-translation.
        self.sorted_variables = sorted_variables
        raw_maybe_equal = maybe_equal
        maybe_equal = maybe_equal or defaultdict(set)
        # Per ordered variable: the set of (absolute) variable positions it
        # is allowed to coincide with.
        self.maybe_equal = [
            set({abs(translation[var]) for var in maybe_equal[v]
                 if var in translation})
            for v in sorted_variables]
        CPPPattern.cached[frozen] = (raw_maybe_equal, self)
diff --git a/runtime/interactive.py b/runtime/interactive.py
new file mode 100644
index 0000000..08f9d0f
--- /dev/null
+++ b/runtime/interactive.py
@@ -0,0 +1,164 @@
+"""An interactive front-end for Triplet Structures.
+
+Call from runtime.py:interactive(...).
+"""
+# pylint: disable=import-error,no-name-in-module
+from runtime.shadow_input import ShadowInput
+
class TSREPL:
    """An interactive interface for the TSRuntime.
    """
    def __init__(self, runtime):
        """Initializes a new TSREPL.

        self.input is a ShadowInput — a stand-in for builtin input() that
        supports recording, saving, and replaying command sequences.
        """
        self.runtime = runtime
        self.ts = runtime.ts
        self.input = ShadowInput()

    def run(self):
        """Run the REPL.

        Loops until the user picks the exit option.
        """
        while True:
            if self.iteration() == "EXIT":
                return

    def iteration(self):
        """Prompt the user for an action and execute it.

        Returns "EXIT" when the user asks to leave, otherwise None.
        """
        print("Starting a new iteration. Please pick an option:")
        # The branch indices below must stay in sync with this menu order.
        option = self.select_one([
            "Manually apply individual rules.",
            "Automatically apply all rules matching pattern.",
            "Print structure.",
            "Begin recording command sequence.",
            "Save command sequence.",
            "Load command sequence.",
            "Set display name.",
            "Prefer nodes.",
            "Exit interactive session.",
        ])
        if option == 0:
            self.apply_manual()
        elif option == 1:
            self.to_fixedpoint()
        elif option == 2:
            print(self.ts)
        elif option == 3:
            self.input.begin_recording()
        elif option == 4:
            file_name = self.input("Save to file: ")
            # Drop the last two recorded inputs — presumably the menu choice
            # and the file-name prompt, so a replay doesn't re-save; confirm
            # against ShadowInput.scrub_last.
            self.input.scrub_last(2)
            self.input.save_recording(file_name)
        elif option == 5:
            file_name = self.input("Load from file: ")
            self.input.load_recording(file_name)
        elif option == 6:
            self.set_display_name()
        elif option == 7:
            self.prefer_nodes()
        elif option == 8:
            return "EXIT"
        else:
            raise NotImplementedError
        return None

    def to_fixedpoint(self):
        """Apply rule(s) until fixedpoint is reached.

        Repeatedly applies the first proposal whose printout contains the
        search term at least n_matches times; stops when none qualifies.
        """
        rules = self.pick_rules()
        search_term = self.input("Search proposals for term: ").lower()
        n_matches = int(self.input("Apply if at least this many matches: "))
        fixedpoint = False
        while not fixedpoint:
            fixedpoint = True
            for _, delta in self.runtime.propose_all(rules):
                if str(delta).lower().count(search_term) >= n_matches:
                    print("Applying proposal:")
                    print(delta)
                    delta.apply()
                    fixedpoint = False
                    # Re-query proposals from scratch after each application.
                    break

    def apply_manual(self):
        """Manually apply rule(s).

        Shows matching proposals one at a time; the user can apply one
        (restarting the listing), skip, or quit.
        """
        rules = self.pick_rules()
        search_term = self.input("Filter proposals for term: ").lower()
        n_matches = int(self.input("With at least this many matches: ") or "0")
        while True:
            for _, delta in self.runtime.propose_all(rules):
                if str(delta).lower().count(search_term) < n_matches:
                    continue
                print(delta)
                # Defaults to "n" (skip), so choice is never empty.
                choice = self.input("Apply? (y/N/q): ").lower() or "n"
                if choice[0] == "y":
                    delta.apply()
                    break
                if choice[0] == "q":
                    return
            else:
                # No proposal was applied this pass: nothing left to show.
                return

    def set_display_name(self):
        """Allows the user to set a display name for a particular node.

        Picks the node whose current display name contains the search term,
        breaking ties by shortest existing display name.
        """
        search_term = self.input("Existing display name: ")
        matching = [
            node for node, display_name in self.ts.display_names.items()
            if search_term in display_name]
        if matching:
            node_name = min(
                matching, key=lambda node: len(self.ts.display_names[node]))
            new_name = self.input("Please select a new display name: ")
            self.ts[node_name].display_name(new_name)
        else:
            print("No matching node.")

    def prefer_nodes(self):
        """Tell the runtime to prefer particular nodes in matches.
        """
        search_term = self.input("Prefer nodes containing string: ")
        self.runtime.affinity.prefer_nodes(lambda node: search_term in node)

    def pick_rules(self):
        """Helper to allow the user to select rules to apply.

        Returns the names of the selected rules.
        """
        names = [rule.name for rule in self.runtime.rules]
        print("Please select the rule(s):")
        indices = self.select_multiple(names)
        return [names[i] for i in indices]


    def select_one(self, options):
        """Helper method for selecting an option.

        Accepts a numeric index or a search term; re-prompts on an invalid
        choice. Returns the chosen index.
        """
        for i, option in enumerate(options):
            print("{}: {}".format(i, option))
        index = self.input("Selection: ").lower()
        try:
            # Indexing validates the bound; IndexError falls through to the
            # search-term interpretation below.
            option = options[int(index)]
            return int(index)
        except (ValueError, IndexError):
            try:
                # Treat it as a search term
                return next(i for i, option in enumerate(options)
                            if index in str(option).lower())
            except StopIteration:
                print("Invalid choice, try again.")
                return self.select_one(options)

    def select_multiple(self, options):
        """Helper method for selecting multiple options.

        Accepts a single index ("3"), an inclusive range ("2-5"), or a
        search term matched against option text. Returns a list of indices.
        """
        for i, option in enumerate(options):
            print("{}: {}".format(i, option))
        term = self.input("Selections: ").lower()
        try:
            if "-" in term:
                from_index, to_index = map(int, term.split("-")[:2])
            else:
                from_index, to_index = int(term), int(term)
            return list(range(from_index, to_index + 1))
        except ValueError:
            return [i for i, option in enumerate(options)
                    if term in str(option).lower()]
diff --git a/runtime/matcher.py b/runtime/matcher.py
new file mode 100644
index 0000000..4e3d2e4
--- /dev/null
+++ b/runtime/matcher.py
@@ -0,0 +1,273 @@
+"""Defines a Matcher, which can efficiently enumerate ProductionRule matches.
+
+This file defines the Matcher class, which keeps track of all matches to a
+ProductionRule (optionally satisfying some partial assignment). It is
+particularly optimized for applying rules until fixedpoint is reached, see eg.
+../tactic_utils.py:RuleFixedpoint.
+
+The main benefit is that Matcher can do "differential updates:" it keeps track
+of all matches, and when the structure is modified it only needs to update
+matches relevant to the delta between the old structure and modified one.
+
+This works because ProductionRules are matched against in three 'layers:'
+
+1. MustMap, 2. TryMap, 3. NoMap.
+
+The key observation each one is *MONOTONIC*: eg. if you add a new fact, the
+MustMap assignments in the new structure are a strict superset of those of the
+old structure (and the new assignments *must* make use of the added fact). So,
+when new facts are added, we can keep all of the old assignments and only check
+new assignments which use the new facts.
+
+Eg. given constraints [(1, 2, 3)] and new facts [(A, B, C), (B, C, D)], we only
+need to check for new assignments which map {1: A, 2: B, 3: C} or {1: B, 2: C,
+3: D}.
+
+Note that profiling shows the vast majority of time is spent in MustMap, hence
+this implementation only optimizes/caches that layer (it re-computes TryMap,
+NoMap on every sync). In the future we can think through how to do differential
+updates to the latter two layers as well.
+"""
+from collections import defaultdict
+# pylint: disable=import-error,no-name-in-module
+import runtime.utils as utils
+from runtime.utils import freezedict, thawdict
+from runtime.assignment import Assignment
+
class Matcher:
    """Keeps track of all assignments to a ProductionRule in the runtime.

    Maintains match state across structure modifications: sync() applies only
    the delta between the old and new structure (see the module docstring for
    why this is sound for the MustMap layer).
    """
    def __init__(self, rt, rule, partial):
        """Initialize a Matcher.

        @rt is the runtime, @rule the ProductionRule to track, and @partial a
        partial assignment (keyed by node name or variable number) that every
        yielded assignment must extend.
        """
        self.rt = rt
        self.rule = rule
        # Snapshot of the structure; sync() diffs against this to build deltas.
        self.freeze_frame = rt.ts.freeze_frame()
        self.partial = partial.copy()
        if any(isinstance(key, str) for key in partial.keys()):
            # Keys are node names; re-key the partial by variable number.
            self.partial = dict({rule.node_to_variable[key]: value
                                 for key, value in partial.items()
                                 if key in rule.node_to_variable})
        # Assignments to the 'MustMap' pattern.
        self.must_matcher = PatternMatcher(rt, self.rule.must_pattern, self.partial)
        # frozen must-assignment |-> {"nevers": [PatternMatcher per /NO_MAP
        # group], "try": PatternMatcher for the TryMap layer, or None while
        # the must-assignment is invalidated by a NoMap match}.
        self.must_assignments = dict()
        for assignment in self.must_matcher.assignments:
            self._add_must(assignment)

    def assignments(self):
        """Yields assignments to self.rule satisfying self.partial.

        Will only yield assignments as of the last call to sync().
        """
        node_to_variable = utils.Translator(self.rule.node_to_variable)
        # Iteration is sorted so results are deterministic.
        for must_assignment in sorted(self.must_assignments.keys()):
            entry = self.must_assignments[must_assignment]
            # Any NoMap match invalidates this must-assignment outright.
            if any(never.assignments for never in entry["nevers"]):
                continue
            if entry["try"] is not None and entry["try"].assignments:
                # Yield each TryMap extension of the must-assignment.
                for assign in map(thawdict, sorted(entry["try"].assignments)):
                    yield Assignment(self.rule, node_to_variable.compose(assign))
            else:
                # No TryMap extension; yield the bare must-assignment.
                assign = thawdict(must_assignment)
                yield Assignment(self.rule, node_to_variable.compose(assign))

    def sync(self):
        """Update the assignments to reflect changes to the structure.

        Only the MustMap layer is updated differentially; NoMap/TryMap
        matchers are re-synced (or rebuilt) per surviving must-assignment.
        """
        current = self.rt.ts.freeze_frame()
        delta = self.freeze_frame.delta_to_reach(current)
        self.freeze_frame = current

        removed, added = self.must_matcher.sync(delta)

        for assign in removed:
            del self.must_assignments[assign]

        for existing in self.must_assignments:
            entry = self.must_assignments[existing]
            invalid = False
            for never in entry["nevers"]:
                never.sync(delta)
                invalid = invalid or bool(never.assignments)
            if invalid:
                # A NoMap currently matches; drop the TryMap matcher until it
                # clears again.
                entry["try"] = None
            elif entry["try"] is not None:
                entry["try"].sync(delta)
            else:
                # Previously invalidated, now valid again: rebuild the TryMap
                # matcher from scratch.
                entry["try"] = PatternMatcher(self.rt, self.rule.try_pattern, thawdict(existing))

        for assign in added:
            self._add_must(assign)

    def _add_must(self, frozen):
        """Adds an assignment to the list of must assignments.

        Builds one PatternMatcher per /NO_MAP group and, if none of them
        match, a TryMap PatternMatcher extending @frozen.
        """
        assignment = thawdict(frozen)
        self.must_assignments[frozen] = dict({
            "nevers": [],
            "try": None,
        })
        invalid = False
        entry = self.must_assignments[frozen]
        # Sorted for deterministic matcher ordering.
        for never in sorted(self.rule.never_patterns):
            never = self.rule.never_patterns[never]
            matcher = PatternMatcher(self.rt, never, assignment)
            entry["nevers"].append(matcher)
            invalid = invalid or bool(matcher.assignments)
            # TODO we could break if invalid, but then we'd need more logic
            # elsewhere.
        if not invalid:
            entry["try"] = PatternMatcher(self.rt, self.rule.try_pattern, assignment)
+
class PatternMatcher:
    """Keeps track of assignments to a single Pattern (existential formula).

    Records, for every satisfying assignment, which concrete facts it relies
    on; sync() then uses a structure delta to invalidate assignments relying
    on removed facts and to search for new assignments seeded by added facts.
    """
    def __init__(self, rt, pattern, partial):
        """Initialize the PatternMatcher.

        @pattern should be a Pattern instance, while @partial should be the
        same partial assignment on the owning Matcher instance.
        """
        self.rt = rt
        self.pattern = pattern
        self.partial = partial.copy()

        # Frozen (hashable) assignments currently satisfying the pattern.
        self.assignments = set()
        # Maps fact |-> set({assignments})
        self.assignments_relying_on_fact = defaultdict(set)
        # Maps assignment |-> set({facts})
        self.facts_used_in_assignment = defaultdict(set)

        # Initializes must, full.
        self.full_sync()

    def full_sync(self):
        """Initializes self.must, self.full (non-differential).

        Enumerates all satisfying assignments from scratch, recording the
        facts each one relies on so sync() can later remove it differentially.
        """
        for assignment in self.pattern.assignments(self.partial):
            frozen = freezedict(assignment)
            for constraint in self.pattern.constraints:
                # Substitute the assignment into the constraint to recover the
                # concrete fact it matched.
                fact = tuple(assignment.get(arg, arg) for arg in constraint)
                self.assignments_relying_on_fact[fact].add(frozen)
                self.facts_used_in_assignment[frozen].add(fact)
            self.assignments.add(frozen)

    def sync(self, delta):
        """Updates the set of known assignments to match the current structure.

        @delta carries remove_facts/add_facts; returns (removed, added) sets
        of frozen assignments.
        """
        removed, added = set(), set()

        if not self.pattern.constraints:
            # A constraint-free pattern is unaffected by fact changes.
            return removed, added

        # First, see if there are any assignments which rely on removed facts.
        for fact in delta.remove_facts:
            for relying in self.assignments_relying_on_fact[fact].copy():
                # NOTE: this call modifies assignments_relying_on_fact, which
                # is why we take a copy.
                self.remove_assignment(relying)
                removed.add(relying)
            self.assignments_relying_on_fact.pop(fact)

        # For all of the new facts, we find new assignments using them.
        partials = set()
        for fact in delta.add_facts:
            for constraint in self.pattern.constraints:
                assignment = self.unify(constraint, fact)
                if assignment:
                    partials.add(freezedict(assignment))
        # Searching from the bare partial would re-enumerate everything; new
        # assignments must actually bind against one of the added facts.
        partials -= set({freezedict(self.partial)})
        partials = [partial for partial in map(thawdict, sorted(partials))
                    if self.pattern.is_partial(partial)]
        for partial in partials:
            for new_must in self.pattern.assignments(partial):
                frozen = freezedict(new_must)
                if frozen not in self.assignments:
                    self.add_assignment(new_must, frozen)
                    added.add(frozen)

        assert not added & removed
        return removed, added

    def remove_assignment(self, frozen):
        """Remove an existing assignment from the set of valid assignments.

        Called by sync() when it is determined that the assignment is no longer
        valid. Also clears the fact-dependency bookkeeping for @frozen.
        """
        self.assignments.remove(frozen)
        for fact in self.facts_used_in_assignment.pop(frozen):
            self.assignments_relying_on_fact[fact].remove(frozen)

    def add_assignment(self, assignment, frozen):
        """Add a newly-valid assignment to the set of valid assignments.

        @frozen must be freezedict(@assignment); both are taken to avoid
        re-freezing.
        """
        assert frozen not in self.assignments

        self.assignments.add(frozen)

        for constraint in self.pattern.constraints:
            fact = tuple(assignment.get(arg, arg) for arg in constraint)
            self.assignments_relying_on_fact[fact].add(frozen)
            self.facts_used_in_assignment[frozen].add(fact)

    def unify(self, constraint, fact):
        """Unifies a constraint with a fact into an assignment.

        Eg. constraint = (1, 2, 3), fact = (A, B, C) -> {1:A, 2:B, 3:C}.
        Starts from self.partial; returns None if unification fails.
        """
        assignment = self.partial.copy()
        # node |-> variables already bound to it, used to enforce the
        # maybe-equal restrictions below.
        inverse = defaultdict(list)
        for arg, var in zip(fact, constraint):
            if not isinstance(var, int):
                # Constants must match.
                if arg != var:
                    break
                continue
            if var in assignment and assignment[var] != arg:
                # Variables must match.
                break
            if any(other_var not in self.pattern.maybe_equal[var]
                   for other_var in inverse[arg]):
                # Two variables may share a node only if marked maybe-equal.
                break
            assignment[var] = arg
            inverse[arg].append(var)
        else:
            # We didn't break; @assignment is correct!
            return assignment
        return None
+
class OneOffMatcher:
    """Drop-in replacement for Matcher which does not save state.

    Recomputes all matches from scratch on every call to assignments(), so no
    differential bookkeeping is needed and sync() is a no-op.
    """
    def __init__(self, rt, rule, partial):
        """Initialize the OneOffMatcher.

        @partial may be keyed by node name or variable number; node names are
        translated to variable numbers here.
        """
        self.rt = rt
        self.rule = rule
        self.partial = partial.copy()
        if any(isinstance(key, str) for key in partial.keys()):
            self.partial = dict({rule.node_to_variable[key]: value
                                 for key, value in partial.items()
                                 if key in rule.node_to_variable})

    def assignments(self):
        """Solves for and yields valid rule assignments.

        Layering mirrors Matcher: MustMap assignments are filtered by the
        NoMap patterns, then extended by the TryMap pattern where possible.
        """
        node_to_variable = utils.Translator(self.rule.node_to_variable)

        must_assignments = self.rule.must_pattern.assignments(self.partial)
        for must_assignment in must_assignments:
            # Skip must-assignments that let any /NO_MAP pattern match.
            if self.rule.invalid(must_assignment):
                continue

            try_assignments = self.rule.try_pattern.assignments(must_assignment)
            any_assigned = False
            for try_assignment in try_assignments:
                any_assigned = True
                yield Assignment(
                    self.rule, node_to_variable.compose(try_assignment))
            # If there are no try_constraints, then try_assignments will still
            # output the 'trivial match,' and so it will have already been
            # checked against invalid in the above loop.
            if not self.rule.try_pattern.constraints:
                continue
            if not any_assigned:
                # The TryMap layer found no extension; fall back to the bare
                # must-assignment.
                yield Assignment(
                    self.rule, node_to_variable.compose(must_assignment))

    def sync(self):
        """No-op for the OneOffMatcher, which is always up-to-date."""
diff --git a/runtime/pattern.py b/runtime/pattern.py
new file mode 100644
index 0000000..d2cdf4b
--- /dev/null
+++ b/runtime/pattern.py
@@ -0,0 +1,114 @@
+"""Methods to simplify looking for patterns in a Structure.
+"""
+# pylint: disable=import-error
+from collections import defaultdict
+import runtime.utils as utils
+
class Pattern:
    """Represents an existential query/pattern to be matched against.
    """
    def __init__(self, runtime, constraints, maybe_equal, variable_names):
        """Initializes the Pattern.

        Arguments
        =========
        - @constraints should be a list of 3-tuples with strings (representing
          nodes) or integers (representing variables) as elements.
        - @maybe_equal should be a dictionary mapping {variable:
          maybe_equivalent_variables}.
        - @variable_names should be the corresponding names (in the structure)
          of all variables.
        """
        self.runtime = runtime
        self.constraints = constraints
        self.maybe_equal = maybe_equal
        self.variable_names = variable_names

    def named_assignment_to_vars(self, assignment):
        """Converts an assignment mapping node names to mapping numbers."""
        # Invert {variable: name} and re-key @assignment by variable number.
        names_to_vars = dict({name: var
                              for var, name in self.variable_names.items()})
        return dict({names_to_vars[k]: v for k, v in assignment.items()})

    def n_variables(self):
        """Returns the number of variables to solve for in the pattern."""
        # Variables are ints; node constants are strings.
        return len(set(
            arg for fact in self.constraints for arg in fact
            if isinstance(arg, int)))

    def assignments(self, partial_assignment=None):
        """Yields assignments satisfying the pattern.

        Each assignment is a dict {variable: node}. @partial_assignment, if
        provided, can be used to initialize some of the variables (see eg.
        production_rule.py:typecheck_with_facts).
        """
        if not self.constraints and partial_assignment is not None:
            # With no constraints, the partial assignment is trivially the
            # unique solution.
            yield partial_assignment
            return
        assert self.constraints

        partial_assignment = partial_assignment or dict()
        partial_assignment = utils.Translator(partial_assignment)

        # Substitute the partial assignment into the constraints, then hand
        # the residual query to the solver.
        constraints = partial_assignment.translate_tuples(self.constraints)
        assignments = self.runtime.solver.assignments(
            constraints, self.maybe_equal)
        for assignment in assignments:
            assignment = partial_assignment.concatenated_with(assignment)
            if self.valid_maybe_equals(assignment):
                yield assignment

    def equivalence_class(self, member):
        """Returns the equivalence class corresponding to variable @member.
        """
        if self.maybe_equal and member in self.maybe_equal:
            return self.maybe_equal[member]
        # By default a variable is only maybe-equal to itself.
        return set({member})

    def valid_maybe_equals(self, assignment):
        """Ensures any variables assigned together are marked maybe_equal.

        This happens particularly when using @partial_assignment.
        """
        # preimages[node] = set of variables mapped to that node.
        preimages = defaultdict(set)
        for variable, node in assignment.items():
            preimages[node].add(variable)
        for variables in preimages.values():
            equivalence_class = self.equivalence_class(next(iter(variables)))
            if not variables <= equivalence_class:
                return False
        return True

    def add_constraint(self, constraint):
        """Adds more constraints to the Pattern.
        """
        self.constraints.append(constraint)

    def is_assignment(self, assignment):
        """True iff @assignment is a complete, valid assignment to the pattern."""
        return len(assignment) == self.n_variables() and self.is_partial(assignment)

    def is_partial(self, partial, check_eq=True):
        """If @partial can be extended to a valid assignment, returns True.

        NOTE: This is an *IF* not if*F*.
        If @check_eq=True, then it will always return False if @partial
        contradicts the equality constraints of the pattern.
        """
        # Keys must be variable numbers, not node names.
        assert not partial or isinstance(list(partial.keys())[0], int)
        if check_eq and not self.valid_maybe_equals(partial):
            return False
        partial = partial or dict()
        partial = utils.Translator(partial)
        constraints = partial.translate_tuples(self.constraints)
        for constraint in constraints:
            # Unassigned variables become None wildcards for the lookup.
            constraint = tuple(arg if isinstance(arg, str) else None
                               for arg in constraint)
            if not self.runtime.ts.lookup(*constraint, read_direct=True):
                return False
        return True

    def __str__(self):
        """Human-readable version of the Pattern."""
        return str(self.constraints)
diff --git a/runtime/production_rule.py b/runtime/production_rule.py
new file mode 100644
index 0000000..c0c6481
--- /dev/null
+++ b/runtime/production_rule.py
@@ -0,0 +1,165 @@
+"""Methods for parsing, mapping against, and executing structure /RULEs.
+"""
+# pylint: disable=no-name-in-module,import-error
+from collections import defaultdict
+from runtime.pattern import Pattern
+import runtime.utils as utils
+
# Node kinds that participate in matching: nodes a rule must map, may try to
# map, or must fail to map (grouped, eg. "/NO_MAP_1").
MAP_TYPES = ["/MUST_MAP", "/TRY_MAP", "/NO_MAP"]
# Node kinds describing actions taken when a rule fires.
ACTION_TYPES = ["/REMOVE", "/SUBTRACT", "/INSERT"]
# All special node kinds recognized while parsing a /RULE.
NODE_TYPES = MAP_TYPES + ACTION_TYPES
+
class ProductionRule:
    """Represents a single /RULE in the TripletStructure.
    """
    def __init__(self, runtime, rule):
        """Initializes a ProductionRule given the corresponding node.

        @rule is the name of the node tagged "/RULE" in the structure.

        NOTE: The structure can be modified arbitrarily once a ProductionRule
        is initialized; all relevant information is copied into the object
        itself.
        """
        self.runtime = runtime
        self.ts = runtime.ts
        self.name = rule
        self.is_backtracking = None
        self.parse_rule()
        self.assign_variables()
        self.prepare_constraints()
        # Snapshot every fact touching a rule node; the rule nodes themselves
        # are later removed from the structure (see runtime.py:extract_rules).
        self.facts = [fact for node in self.all_nodes
                      for fact in self.ts.lookup(node, None, None)]
        self.indexed_facts = dict({
            node: self.ts.lookup(node, None, None).copy()
            for node in self.all_nodes
        })

    def parse_rule(self):
        """Parses the relevant nodes to the rule (eg. MUST_MAP, etc.)

        Populates nodes_by_type, map_nodes, all_nodes, equal, and maybe_equal.
        """
        rule_node, solver = self.name, self.runtime.solver

        self.nodes_by_type = defaultdict(list)
        # /NO_MAP is special: it is sub-indexed into groups ("/NO_MAP_1", ...).
        self.nodes_by_type["/NO_MAP"] = defaultdict(list)
        self.map_nodes = []
        self.all_nodes = set({rule_node})
        # Matches (rule_member, value, key) facts belonging to this rule.
        pattern = [(0, rule_node, "/RULE"), (0, 1, 2)]
        for assignment in solver.assignments(pattern):
            self.all_nodes.add(assignment[0])
            value, key = assignment[1], assignment[2]
            try:
                node_type = next(node_type for node_type in NODE_TYPES
                                 if key.startswith(node_type))
            except StopIteration:
                # Not a special key; ignore.
                continue
            if node_type != "/NO_MAP":
                assert node_type == key
                self.nodes_by_type[key].append(value)
            else:
                # "/NO_MAP_3" -> group index 3; bare "/NO_MAP" -> group 0.
                index = int(key.split("/NO_MAP")[1].strip("_") or 0)
                self.nodes_by_type[node_type][index].append(value)
            if node_type in MAP_TYPES:
                self.map_nodes.append(value)
            self.all_nodes.add(value)

        # Nodes declared equal (via /=) will share a single variable.
        self.equal = defaultdict(set)
        for assignment in solver.assignments([(0, rule_node, "/RULE"),
                                              (0, 1, "/="),
                                              (0, 2, "/=")]):
            # NOTE that assignments will call this for each permutation of the
            # two, so we don't actually need to special-case that.
            self.equal[assignment[1]].add(assignment[2])

        # Nodes declared /MAYBE= may (but need not) map to the same node.
        self.maybe_equal = defaultdict(set)
        for assignment in solver.assignments([(0, rule_node, "/RULE"),
                                              (0, 1, "/MAYBE="),
                                              (0, 2, "/MAYBE=")]):
            # NOTE see above.
            self.maybe_equal[assignment[1]].add(assignment[2])

    def assign_variables(self):
        """Gives each node in the mapping a variable name/ID/number.

        Populates node_to_variable, variable_to_node, and
        maybe_equal_variables.
        """
        # Every node that we might map against gets a variable name/ID.
        # Variable ID -> Node
        self.node_to_variable = dict()
        self.variable_to_node = dict()
        self.maybe_equal_variables = dict()
        # It's tempting here to give variables just to nodes marked /MUST_MAP,
        # but then you quickly run into problems because we often want to match
        # for the /NO_MAP nodes, etc. Plus other parts of the code will often
        # use variable numbers and node names interchangeably, so it's just
        # easier to support variable numbers for all relevant nodes. Unused
        # variable numbers don't have any negative impact.
        for node in sorted(self.all_nodes):
            variable = len(self.node_to_variable)
            # Nodes marked equal (/=) reuse the variable of any
            # already-numbered equivalent.
            for equivalent in self.equal[node]:
                try:
                    variable = self.node_to_variable[equivalent]
                    break
                except KeyError:
                    continue
            self.node_to_variable[node] = variable
            # The last one will take priority; we sort the iteration order so
            # this is deterministic.
            self.variable_to_node[variable] = node
            self.maybe_equal_variables[variable] = set({variable})

        # Translate the node-level maybe_equal declarations to variables.
        for key, values in self.maybe_equal.items():
            key = self.node_to_variable[key]
            values = set(map(self.node_to_variable.get, values))
            self.maybe_equal_variables[key].update(values)

    def prepare_constraints(self):
        """Extracts the constraints corresponding to the rule.

        Each fact touching a map node becomes a constraint in exactly one of
        must_pattern, try_pattern, or never_patterns[i].

        MUST be called after parse_rule().
        """
        ts = self.ts
        node_to_variable = utils.Translator(self.node_to_variable)

        # We will keep track of which nodes have constraints on them to avoid
        # free nodes.
        constrained = set()

        # Invert the /NO_MAP grouping: node -> group index.
        no_map = dict({
            node: index for index in self.nodes_by_type["/NO_MAP"].keys()
            for node in self.nodes_by_type["/NO_MAP"][index]})
        no_map_nodes = set(no_map.keys())

        def pattern():
            # Fresh, empty Pattern sharing the rule's equality metadata.
            return Pattern(self.runtime, [], self.maybe_equal_variables,
                           self.variable_to_node)
        self.must_pattern = pattern()
        self.try_pattern = pattern()
        self.never_patterns = defaultdict(pattern)
        relevant_facts = (fact for node in self.map_nodes
                          for fact in ts.lookup(node, None, None))
        for fact in relevant_facts:
            constrained.update(fact)
            constraint = node_to_variable.translate_tuple(fact)
            arguments = set(fact)
            # Precedence: NoMap > TryMap > Insert > MustMap.
            if arguments & no_map_nodes:
                index = next(no_map[argument] for argument in fact
                             if argument in no_map)
                self.never_patterns[index].add_constraint(constraint)
            elif arguments & set(self.nodes_by_type["/TRY_MAP"]):
                self.try_pattern.add_constraint(constraint)
            elif arguments & set(self.nodes_by_type["/INSERT"]):
                # We can try or fail to map against INSERT nodes, but they
                # should never be must_maps.
                pass
            else:
                assert set(fact) & set(self.nodes_by_type["/MUST_MAP"])
                self.must_pattern.add_constraint(constraint)

        assert not no_map_nodes & set(self.nodes_by_type["/TRY_MAP"])
        assert set(self.map_nodes) <= constrained

    def invalid(self, assignment):
        """True iff @assignment allows some of the /NEVER_MAPs to map.
        """
        return any(not utils.is_empty(pattern.assignments(assignment))
                   for pattern in self.never_patterns.values())
diff --git a/runtime/runtime.py b/runtime/runtime.py
new file mode 100644
index 0000000..a65596e
--- /dev/null
+++ b/runtime/runtime.py
@@ -0,0 +1,96 @@
+"""Methods for parsing and executing TripletStructures according to their rules.
+"""
+# pylint: disable=import-error,no-name-in-module
+from runtime.cpp_structure import CPPStructure
+from runtime.production_rule import ProductionRule
+from runtime.interactive import TSREPL
+from runtime.matcher import OneOffMatcher
+
class TSRuntime:
    """A runtime for interpreting and executing triplet structures.
    """
    def __init__(self, ts):
        """Initializes a new TSRuntime.

        @ts is the TripletStructure to execute; its /RULE nodes are parsed
        into ProductionRules and then removed from the structure.
        """
        ts.commit(False)
        self.ts = ts
        # The Solver handles the 'dirty work' of actually finding matches to
        # rule implicants.
        self.solver = CPPStructure(self.ts)
        self.extract_rules()
        ts.commit(False)

    def interactive(self):
        """Begins an interactive runtime session (a REPL).
        """
        repl = TSREPL(self)
        repl.run()

    def extract_rules(self):
        """Populates self.rules and deletes the structure nodes.

        NOTE: This function removes all rule-related nodes from the structure
        after parsing them into ProductionRules. This is a relatively cheap and
        easy way to ensure we don't have to worry about rules applying to other
        rules, etc. In the long run, though, it would be nice to support
        reflective rules.
        """
        ts = self.ts

        self.rules = []
        self.rules_by_name = dict()

        # Every fact (A, node, "/RULE") declares @node to be a rule.
        rule_nodes = set(fact[1] for fact in ts.lookup(None, None, "/RULE"))
        for rule_node in sorted(rule_nodes):
            rule = ProductionRule(self, rule_node)
            self.rules.append(rule)
            self.rules_by_name[rule.name] = rule

        # Remove all rule-related nodes (with their facts), then any remaining
        # non-"/:" scratch nodes.
        avoid_nodes = set(avoid_node for rule in self.rules
                          for avoid_node in rule.all_nodes)
        for avoid_node in avoid_nodes:
            ts[avoid_node].remove_with_facts()
        for node in ts.nodes.copy():
            if not node.startswith("/:"):
                ts[node].remove()

    def matcher_propose(self, matcher):
        """Propose TSDeltas based on a Matcher.

        Yields (assignment, delta) pairs, where delta = assignment.produce().
        Each applied delta is rolled back before producing the next one, so
        the structure is left unchanged between yields.
        """
        assert self.ts.is_clean()

        for assignment in matcher.assignments():
            assignment = assignment.apply()
            delta = self.ts.commit(commit_if_clean=True)

            if delta:
                yield (assignment, delta)
            if self.ts.path[-1] is delta:
                self.ts.rollback(-1)

    def propose(self, rule, partial=None):
        """Propose TSDeltas based on the rules.

        @rule is the rule's name; @partial (optional) constrains the matches.
        Yields (assignment, delta) pairs, where delta = assignment.produce().
        """
        rule = self.rules_by_name[rule]
        # FIX: @partial was normalized here and then redundantly re-defaulted
        # (`partial or dict()`) in the constructor call below.
        partial = partial or dict()
        matcher = OneOffMatcher(self, rule, partial)
        yield from self.matcher_propose(matcher)

    def propose_all(self, rules=None):
        """Helper to yield proposals from multiple ProductionRules.

        @rules is a list of rule names; defaults to every known rule.
        """
        if rules is None:
            rules = [rule.name for rule in self.rules]
        for rule in rules:
            yield from self.propose(rule)

    def get_rule(self, name):
        """Returns the ProductionRule associated with @name.

        In the structure, there is some fact (A, @node, "/RULE").

        NOTE: equivalent to self.rules_by_name[name], except a missing name
        raises StopIteration rather than KeyError; kept as-is in case callers
        rely on that.
        """
        return next(rule for rule in self.rules if rule.name == name)
diff --git a/runtime/shadow_input.py b/runtime/shadow_input.py
new file mode 100644
index 0000000..27b82d8
--- /dev/null
+++ b/runtime/shadow_input.py
@@ -0,0 +1,89 @@
+"""Helper class allowing users to record and play-back input across invocations.
+"""
+import os
+
# Directory where recordings are saved/loaded. Under `bazel run`,
# BUILD_WORKING_DIRECTORY points at the invoking shell's working directory;
# otherwise fall back to the current directory.
RECORDINGS_DIR = "{}/recordings".format(os.environ.get("BUILD_WORKING_DIRECTORY", "."))
+
class ShadowInput:
    """Class for recording, saving, and re-playing user input.

    This is used in interactive.py to make replaying and debugging complex
    sequences of commands easier.

    Initialization: input_ = ShadowInput()

    Usage: replace all instances of input(...) with input_(...).

    NOTE: the string "__FINISH__" has special meaning in our saved recordings,
    so you should never expect users to input it.
    """
    def __init__(self):
        """Initializes a new ShadowInput.
        """
        # Used as a stack of recordings that we're currently reading from
        # (needed eg. if one recording then loads another recording).
        self.in_files = []
        # The user's input(s) since the last begin_recording().
        self.recording = []

    def __call__(self, prompt):
        """Prompt the user for input, then record and return it.

        This is meant to be a drop-in replacement for input(prompt).
        """
        print(prompt, end="")
        while self.in_files:
            line = self.in_files[-1].readline()
            # When __FINISH__ is encountered in a recording, we finish the
            # recording immediately. This is useful for debugging.
            if line == "" or line.strip() == "__FINISH__":
                # This file is finished, move on to the next one.
                self.in_files[-1].close()
                self.in_files.pop()
                continue
            # We read a line from the file successfully.
            line = line.strip()
            print(line, end="\n")
            break
        else:
            # There are no loaded recordings; read an input from the user and
            # record it.
            line = input()
            self.recording.append(line)
        return line

    def begin_recording(self):
        """Clears the recorded user input.
        """
        self.recording = []

    def scrub_last(self, n_inputs):
        """Removes the last @n_inputs lines of user input.

        Used eg. to remove the user input that led to a call to save_recording
        (so the recording doesn't try to save itself unnecessarily).
        """
        # BUG FIX: recording[:-0] == [], so n_inputs=0 used to wipe the whole
        # recording instead of removing nothing. Guard against n_inputs <= 0.
        if n_inputs > 0:
            self.recording = self.recording[:-n_inputs]

    def load_recording(self, file_name):
        """Begins replaying a saved recording.

        NOTE: the file handle is deliberately left open; __call__ reads from
        it incrementally and closes it once exhausted.
        """
        self.in_files.append(open(self.recording_path(file_name), "r"))

    def save_recording(self, file_name):
        """Saves the current recording.

        NOTE: This does *NOT* clear the recording history; if you
        save_recording again before calling begin_recording, the second
        recording will have all the contents of the first one.
        """
        with open(self.recording_path(file_name), "w") as out_file:
            out_file.write("\n".join(self.recording))

    @staticmethod
    def recording_path(file_name):
        """Helper to return the absolute path corresponding to @file_name.

        See RECORDINGS_DIR to set where recordings are stored/read from.
        """
        return "{}/{}".format(RECORDINGS_DIR, file_name)
diff --git a/runtime/tests/BUILD b/runtime/tests/BUILD
new file mode 100644
index 0000000..c3652c2
--- /dev/null
+++ b/runtime/tests/BUILD
@@ -0,0 +1,20 @@
# Unit test for the C++-backed structure solver bindings.
py_test(
    name = "test_cpp_structure",
    size = "small",
    srcs = ["test_cpp_structure.py"],
    deps = [
        "//:ts_lib",
        "//runtime:cpp_structure",
        "@bazel_python//:pytest_helper",
    ],
)

# Unit test for the runtime's assorted helper utilities.
py_test(
    name = "test_utils",
    size = "small",
    srcs = ["test_utils.py"],
    deps = [
        "//runtime:utils",
        "@bazel_python//:pytest_helper",
    ],
)
diff --git a/runtime/tests/test_cpp_structure.py b/runtime/tests/test_cpp_structure.py
new file mode 100644
index 0000000..dda4467
--- /dev/null
+++ b/runtime/tests/test_cpp_structure.py
@@ -0,0 +1,53 @@
+"""Tests for cpp_structure.py"""
+from collections import defaultdict
+from external.bazel_python.pytest_helper import main
+from ts_lib import TripletStructure
+from runtime.cpp_structure import CPPStructure, CPPPattern
+
def test_simple_constraints():
    """Tests the CPPStructure class."""
    ts = TripletStructure()
    ts[":A"].map({ts[":B"]: ts[":C"]})
    # Facts added after construction should still be visible to the solver.
    ts_cpp = CPPStructure(ts)
    ts[":B"].map({ts[":C"]: ts[":A"]})
    ts[":B"].map({ts[":C"]: ts[":X"]})

    # Removed fact.
    ts[":B"].map({ts[":B"]: ts[":B"]})
    constraints = [(0, 0, 0)]
    assert list(ts_cpp.assignments(constraints)) == [dict({0: "/:B"})]
    ts.remove_fact(("/:B", "/:B", "/:B"))
    assert list(ts_cpp.assignments(constraints)) == []

    # Unquantified test: a satisfied, variable-free query yields exactly one
    # (empty) assignment...
    constraints = [("/:A", "/:B", "/:C"), ("/:B", "/:C","/:A")]
    assert list(ts_cpp.assignments(constraints)) == [dict({})]

    # ...and an unsatisfied one yields nothing.
    constraints = [("/:Wrong", "/:B", "/:C"), ("/:B", "/:C","/:A")]
    assert list(ts_cpp.assignments(constraints)) == []
    constraints = [("/:A", "/:B", "/:B")]
    assert list(ts_cpp.assignments(constraints)) == []

    # Maybe_equals test: distinct variables may share a node only when they
    # are declared maybe-equal.
    constraints = [(5, "/:B", 6), (7, 0, 1), (7, 2, 3)]
    assert not list(ts_cpp.assignments(constraints))

    maybe_equal = defaultdict(set)
    for variable in set({5, 1}):
        maybe_equal[variable] = set({5, 1})
    for variable in set({6, 0, 2}):
        maybe_equal[variable] = set({6, 0, 2})
    truth = [dict({
        5: "/:A",
        6: "/:C",
        7: "/:B",
        0: "/:C",
        1: "/:A",
        2: "/:C",
        3: "/:X",
    })]
    assert list(ts_cpp.assignments(constraints, maybe_equal)) == truth
    # Test we can pull from the cache correctly.
    assert list(ts_cpp.assignments(constraints, maybe_equal)) == truth

main(__name__, __file__)
diff --git a/runtime/tests/test_utils.py b/runtime/tests/test_utils.py
new file mode 100644
index 0000000..fcdc02d
--- /dev/null
+++ b/runtime/tests/test_utils.py
@@ -0,0 +1,71 @@
+"""Tests for utils.py"""
+from external.bazel_python.pytest_helper import main
+import runtime.utils as utils
+
def test_freeze_thaw_dict():
    """Checks that freezedict produces a tuple that thawdict round-trips."""
    original = dict({"hello": "there"})
    frozen = utils.freezedict(original)
    assert isinstance(frozen, tuple)
    assert utils.thawdict(frozen) == original
+
def test_is_empty():
    """Checks is_empty on both an empty and a non-empty generator."""
    empty_generator = (value for value in range(0))
    assert utils.is_empty(empty_generator)
    nonempty_generator = (value for value in range(5))
    assert not utils.is_empty(nonempty_generator)
+
def test_translator():
    """Tests the Translator class."""
    translator = utils.Translator(dict({
        "hello": "bonjour",
        "why": "porquoi",
        "what": "quoi",
    }))
    # Elements without a translation pass through unchanged.
    assert translator.translate("hello") == "bonjour"
    assert translator.translate("Matthew") == "Matthew"

    assert (translator.translate_tuple(("hello", "Matthew"))
            == ("bonjour", "Matthew"))

    assert (translator.translate_tuples([("hello", "Matthew"),
                                         ("why", "what")])
            == [("bonjour", "Matthew"), ("porquoi", "quoi")])

    assert (translator.translate_list(["hello", "Matthew"])
            == ["bonjour", "Matthew"])

    # compose drops keys whose value has no entry in the second mapping...
    composed = translator.compose(dict({
        "bonjour": "salam",
        "porquoi": "chera",
        "merci": "merci",
    }))
    assert composed == dict({"hello": "salam", "why": "chera"})
    # ...unless default_identity=True, which keeps the current translation.
    composed = translator.compose(dict({
        "bonjour": "salam",
        "porquoi": "chera",
        "merci": "merci",
    }), default_identity=True)
    assert composed == dict({"hello": "salam", "why": "chera", "what": "quoi"})

    concatenated = translator.concatenated_with(dict({
        "thanks": "merci",
    }))
    assert concatenated == dict({
        "hello": "bonjour",
        "why": "porquoi",
        "what": "quoi",
        "thanks": "merci",
    })
+
def test_real_hash():
    """Regression test for the real_hash method."""
    # SHA224 hex digest of str(sorted({"hello": "there"}.items())).
    truth = "ff42aa4718d9da1c7d491e8be8116f0c62db8d910de16e8ee0648147"
    assert utils.real_hash(dict({"hello": "there"})) == truth
    try:
        # We don't yet support tuples, it should throw a NIE.
        utils.real_hash(("hi", "hello"))
    except NotImplementedError:
        pass
    else:
        assert False
+
+main(__name__, __file__)
diff --git a/runtime/utils.py b/runtime/utils.py
new file mode 100644
index 0000000..d1a30ee
--- /dev/null
+++ b/runtime/utils.py
@@ -0,0 +1,88 @@
+"""Assorted helper functions for the TSRuntime."""
+import hashlib
+
def freezedict(dictionary):
    """Returns a hashable, order-independent representation of @dictionary.

    The result is a tuple of (key, value) pairs sorted by key, suitable for
    membership in sets or use as a dict key; invert with thawdict.
    """
    frozen_items = sorted(dictionary.items())
    return tuple(frozen_items)
+
def thawdict(dictionary):
    """Reconstructs a plain dict from the tuple form produced by freezedict."""
    thawed = dict(dictionary)
    return thawed
+
# Private sentinel so that a generator legitimately yielding None is still
# detected as non-empty.
_EMPTY_SENTINEL = object()

def is_empty(generator):
    """True iff the generator is empty.

    NOTE: this consumes (and discards) the generator's first element when one
    exists, so callers should treat @generator as spent afterwards.

    Used primarily to check if there are any satisfying assignments to a
    pattern, eg. in production_rule.py.
    """
    # next() with a default is the idiomatic, exception-free equivalent of
    # the try/except StopIteration dance.
    return next(generator, _EMPTY_SENTINEL) is _EMPTY_SENTINEL
+
class Translator:
    """Wraps a translation dict and applies it element-wise to collections.

    Useful, eg., when you want to translate between a set of constraints (using
    variable names) and ``filled-in'' constraints according to some assignment.
    """
    def __init__(self, translation):
        """Initializes the Translator with the dict @translation."""
        self.translation = translation

    def translate(self, element):
        """Returns @element's translation, or @element itself if it has none."""
        if element in self.translation:
            return self.translation[element]
        return element

    def translate_tuple(self, elements):
        """Translates every element of the tuple @elements."""
        return tuple(self.translate(element) for element in elements)

    def translate_tuples(self, elements):
        """Translates a list of tuples, element-wise."""
        return [self.translate_tuple(one_tuple) for one_tuple in elements]

    def translate_list(self, elements):
        """Translates every member of the list @elements."""
        return [self.translate(element) for element in elements]

    def compose(self, after, default_identity=False):
        """Returns the composition of @self.translation with the dict @after.

        Keys whose translated value is missing from @after are dropped, unless
        @default_identity is True, in which case they keep their current
        translation.

        Used eg. when we have an assignment to _variables_ that we want to turn
        in to an assignment to _nodes_ using a node-to-variable map.
        """
        composed = dict()
        for key, value in self.translation.items():
            if value in after:
                composed[key] = after[value]
            elif default_identity:
                composed[key] = value
        return composed

    def concatenated_with(self, other):
        """Returns @self.translation updated with the entries of @other."""
        concatenated = dict(self.translation)
        concatenated.update(other)
        return concatenated
+
def real_hash(item):
    """Returns a "cryptographically-secure-ish" hash of @item.

    Supports strings and dicts; anything else raises NotImplementedError.

    This is used in particular in assignment.py for giving newly-created nodes
    unambiguous, reproducible names based only on their 'source' assignment.
    """
    if isinstance(item, str):
        digest = hashlib.sha224(item.encode())
        return digest.hexdigest()
    if isinstance(item, dict):
        # NOTE: this assumes that the str(...) does not include any
        # non-deterministic information (eg. ids). Maybe it would be best to
        # let real_hash operate directly on the sorted list.
        canonical = str(sorted(item.items()))
        return real_hash(canonical)
    raise NotImplementedError
diff --git a/tactic_utils.py b/tactic_utils.py
new file mode 100644
index 0000000..82244ba
--- /dev/null
+++ b/tactic_utils.py
@@ -0,0 +1,76 @@
+"""Collection of macros for writing Tactics.
+
+Matthew considers this to be written "in the DSL," although it's somewhat on
+the border.
+"""
+from runtime.matcher import Matcher, OneOffMatcher
+
+def SearchRules(rt, search_term):
+ """Returns all rules with @search_term in their name.
+
+ Used, for example, to quickly mark all successor pairs in a structure
+ (since currently "X is A, Y is B -> Successor(X, Y)" is a separate rule for
+ each A, B).
+ """
+ return [rule.name for rule in rt.rules if search_term in rule.name]
+
+def Fix(function, *args, **kwargs):
+ """Applies @function repeatedly until it returns False.
+
+ Many tactic functions return True if they made a change. This function
+ lets you apply those tactics until a fixedpoint is reached.
+ """
+ while function(*args, **kwargs):
+ pass
+
+def ApplyRulesMatching(rt, search_term, partial=None):
+ """Calls RuleFixedpoint for every rule containing @search_term.
+
+ Note that this is *all* it does --- it does not, e.g., ever repeat the 1st
+ rule after applying the 3rd. If you want the fixedpoint for all of the
+ rules together, you need to wrap this call in a Fix(...).
+ """
+ rules = SearchRules(rt, search_term)
+ did_anything = False
+ for rule in rules:
+ did_anything = RuleFixedpoint(rt, rule, partial) or did_anything
+ return did_anything
+
+MATCHERS = dict()
+def GetMatcher(rt, rule, partial, one_off=False):
+ """Returns a Matcher keeping track of applications to @rule."""
+ rule = rt.rules_by_name[rule]
+ if one_off:
+ return OneOffMatcher(rt, rule, partial)
+ key = (id(rt), rule, tuple(sorted(partial.items())))
+ if key not in MATCHERS:
+ MATCHERS[key] = Matcher(rt, rule, partial)
+ return MATCHERS[key]
+
+def RuleFixedpoint(rt, rule, partial=None):
+ """Given a rule, applies it repeatedly until fixedpoint is reached.
+ """
+ matcher = GetMatcher(rt, rule, partial or dict({}))
+
+ did_anything = False
+ while True:
+ matcher.sync()
+ # NOTE: for correctness, rt.matcher_propose assumes you only ever use
+ # exactly one of the things it yields.
+ try:
+ _ = next(rt.matcher_propose(matcher))
+ except StopIteration:
+ break
+ did_anything = True
+ return did_anything
+
+def RuleAny(rt, rule, partial, one_off=True):
+ """True iff @rule has any matches extending @partial in the structure."""
+ matcher = GetMatcher(rt, rule, partial, one_off=one_off)
+ matcher.sync()
+ try:
+ _ = next(matcher.assignments())
+ return True
+ except StopIteration:
+ pass
+ return False
diff --git a/tests/BUILD b/tests/BUILD
new file mode 100644
index 0000000..df69535
--- /dev/null
+++ b/tests/BUILD
@@ -0,0 +1,20 @@
+py_test(
+ name = "test_ts_lib",
+ size = "small",
+ srcs = ["test_ts_lib.py"],
+ deps = [
+ "//:ts_lib",
+ "@bazel_python//:pytest_helper",
+ ],
+)
+
+py_test(
+ name = "test_ts_utils",
+ size = "small",
+ srcs = ["test_ts_utils.py"],
+ deps = [
+ "//:ts_lib",
+ "//:ts_utils",
+ "@bazel_python//:pytest_helper",
+ ],
+)
diff --git a/tests/test_ts_lib.py b/tests/test_ts_lib.py
new file mode 100644
index 0000000..b4a8a5d
--- /dev/null
+++ b/tests/test_ts_lib.py
@@ -0,0 +1,243 @@
+"""Tests for ts_lib.py"""
+# pylint: disable=pointless-statement,import-error
+import itertools
+from external.bazel_python.pytest_helper import main
+from ts_lib import TripletStructure
+
+def test_get_create_nodes():
+ """Tests that ts.__getitem__ works correctly."""
+ ts = TripletStructure()
+ ts[":A"]
+ assert ts.has_node("/:A")
+ ts[":B, :C"]
+ assert ts.has_node("/:B") and ts.has_node("/:C")
+ b0, b1 = ts[":B:??, :B:??"]
+ assert str(b0) == "/:B:0"
+ assert str(b1) == "/:B:1"
+ assert ts.has_node("/:B:0") and ts.has_node("/:B:1")
+ # Should be lexicographical.
+ assert b0 < b1
+
+ assert b0.display_name() == "/:B:0"
+ b0.display_name("b0")
+ assert b0.display_name() == "b0"
+
+def test_str():
+ """Tests TripletStructure.__str__"""
+ ts = TripletStructure()
+ ts[":A"].map({ts[":B"]: ts[":C"]})
+ truth = f"TripletStructure ({id(ts)}):\n\t('/:A', '/:B', '/:C')"
+ assert str(ts) == truth
+
+def test_shadow():
+ """Tests that we can shadow operations on the structure."""
+ # pylint: disable=missing-function-docstring
+ class ShadowStructure():
+ """Dummy shadower."""
+ def __init__(self):
+ self.log = []
+ def add_node(self, node):
+ self.log.append(("+", node))
+ def remove_node(self, node):
+ self.log.append(("-", node))
+ def add_fact(self, fact):
+ self.log.append(("+", fact))
+ def remove_fact(self, fact):
+ self.log.append(("-", fact))
+ ts = TripletStructure()
+ ts.shadow = ShadowStructure()
+ ts[":A"].map({ts[":B"]: ts[":C"]})
+ ts[":C"].remove_with_facts()
+ # Shouldn't re-remove it.
+ ts.remove_fact(("/:A", "/:B", "/:C"))
+ assert len(ts.shadow.log) == 6
+ assert (set(ts.shadow.log[:3])
+ == set({("+", "/:A"), ("+", "/:B"), ("+", "/:C")}))
+ assert ts.shadow.log[3] == ("+", ("/:A", "/:B", "/:C"))
+ assert ts.shadow.log[4] == ("-", ("/:A", "/:B", "/:C"))
+ assert ts.shadow.log[5] == ("-", "/:C")
+
+def test_scope():
+ """Tests that scopes work correctly."""
+ ts = TripletStructure()
+ scope = ts.scope("/:Scope")
+ assert str(scope[":A"]) == "/:Scope:A"
+ assert ts.has_node("/:Scope:A")
+ assert scope.protected()[":A"] == "/:Scope:A"
+ sub_scope = scope.scope(":Sub")
+ assert str(sub_scope[":A"]) == "/:Scope:Sub:A"
+ assert str(sub_scope["/:A"]) == "/:A"
+ assert list(sub_scope) == [sub_scope[":A"]]
+ assert scope[":Sub:A"] in sub_scope
+ assert scope[":A"] not in sub_scope
+ assert sub_scope[":A"] - scope == ":Sub:A"
+ assert sub_scope[":A"] - ts.scope(":Hello") == "/:Scope:Sub:A"
+ with ts.scope(":Scope"):
+ assert ts.scope().prefix == "/:Scope"
+ with ts.scope(":Sub"):
+ assert ts.scope().prefix == "/:Scope:Sub"
+ assert ts[":A"] == sub_scope[":A"]
+ assert ts.scope().prefix == "/:Scope"
+ assert ts[":Sub:A"] == sub_scope[":A"]
+
+def test_freeze_frame():
+ """Tests TSFreezeFrame."""
+ ts = TripletStructure()
+ ts[":A"].map({ts[":B"]: ts[":C"]})
+ freeze_frame = ts.freeze_frame()
+ assert freeze_frame.nodes == set({"/:A", "/:B", "/:C"})
+ assert freeze_frame.facts == set({("/:A", "/:B", "/:C")})
+ ts[":D"].map({ts[":E"]: ts[":B"]})
+ ts[":C"].remove_with_facts()
+
+ delta = freeze_frame.delta_to_reach(ts.freeze_frame())
+ assert delta.add_nodes == set({"/:D", "/:E"})
+ assert delta.add_facts == set({("/:D", "/:E", "/:B")})
+ assert delta.remove_nodes == set({"/:C"})
+ assert delta.remove_facts == set({("/:A", "/:B", "/:C")})
+
+ ts.commit()
+ ts.freeze_frame().delta_to_reach(freeze_frame).apply()
+ assert freeze_frame == ts.freeze_frame()
+
+def test_delta():
+ """Tests TSDelta and TSRecorder."""
+ ts = TripletStructure()
+ assert not ts.buffer and ts.is_clean()
+ ts.commit(commit_if_clean=False)
+ assert ts.path == [None]
+ ts.commit(commit_if_clean=True)
+ assert not ts.path[1]
+
+ ts[":A"].map({ts[":B"]: [ts[":C"], ts[":D"]]})
+ assert ts.buffer and not ts.is_clean()
+ assert ts.buffer.add_nodes == set({"/:A", "/:B", "/:C", "/:D"})
+ assert ts.buffer.add_facts == set({("/:A", "/:B", "/:C"),
+ ("/:A", "/:B", "/:D")})
+ assert ts.buffer.remove_nodes == ts.buffer.remove_facts == set()
+ ts.commit()
+ assert ts.path[2]
+ assert not ts.buffer
+ assert ts.is_clean()
+
+ recording = ts.start_recording()
+ before = ts.freeze_frame()
+ ts.rollback(0)
+ assert before == ts.freeze_frame()
+
+ ts[":D"].remove_with_facts()
+ remove_D = ts.commit()
+ assert recording.commits() == [remove_D]
+ ts.rollback(-1)
+ assert recording.commits() == []
+ assert before == ts.freeze_frame()
+
+ ts[":C"].remove_with_facts()
+ remove_C = ts.commit()
+ assert recording.commits() == [remove_C]
+ ts.rollback(len(ts.path) - 1)
+ assert recording.commits() == []
+ assert before == ts.freeze_frame()
+
+ ts[":A"].remove_with_facts()
+ remove_A = ts.commit()
+ assert recording.commits(rollback=True) == [remove_A]
+ assert recording.commits() == []
+ assert before == ts.freeze_frame()
+
+ ts[":A"].map({ts[":D"]: ts[":E"]})
+ ts[":C"].remove_with_facts()
+ truth = f"TSDelta ({id(ts.buffer)}):"
+ truth += "\n\t- Nodes: \n\t\t/:C"
+ truth += "\n\t+ Nodes: \n\t\t/:E"
+ truth += "\n\t- Facts: \n\t\t('/:A', '/:B', '/:C')"
+ truth += "\n\t+ Facts: \n\t\t('/:A', '/:D', '/:E')"
+ assert str(ts.buffer) == truth
+
+def test_remove_node_without_facts():
+ """Tests that a node cannot be removed while it has linked facts.
+
+ Adapted from Zhe's example on PR #421.
+ """
+ ts = TripletStructure()
+ ts[":A"].map({ts[":B"]: ts[":C"], ts[":C"]: ts[":B"]})
+ for full_name in ("/:A", "/:B", "/:C"):
+ try:
+ ts.remove_node(full_name)
+ except AssertionError:
+ pass
+ else:
+ assert False, "TripletStructure let me remove a useful node."
+ try:
+ ts[full_name].remove()
+ except AssertionError:
+ pass
+ else:
+ assert False, "TripletStructure let me remove a useful node."
+
+def test_remove_node_with_facts():
+ """Tests that we can successfully remove a node with all of its facts.
+
+ Adapted from Zhe's example on PR #421.
+ """
+ ts = TripletStructure()
+ node_names = ("/:A", "/:B", "/:C")
+
+ def run_for_node(ts, node_name):
+ ts[":A"].map({ts[":B"]: ts[":C"], ts[":C"]: ts[":B"]})
+ assert any(fact_list for fact_list in ts.facts.values())
+ assert all(ts.has_node(full_name) for full_name in node_names)
+ ts[node_name].remove_with_facts()
+
+ assert not any(fact_list for fact_list in ts.facts.values())
+ assert not ts.has_node(node_name)
+
+ for node_name in node_names:
+ run_for_node(ts, node_name)
+
+def test_fact_invariants():
+ """Tests that some desired invariants always hold."""
+ ts = TripletStructure()
+ # Try adding nodes
+ node_names = ["/:{}".format(i) for i in range(1000)]
+ for node_name, next_node_name in zip(node_names, node_names[1:]):
+ ts[node_name]
+ assert ts.has_node(node_name)
+ assert not ts.has_node(next_node_name)
+ assert not ts.has_node(node_names[-1])
+ ts.add_node(node_names[-1])
+ assert ts.has_node(node_names[-1])
+
+ # Try bulk removing/adding nodes.
+ ts.remove_nodes(node_names)
+ assert not any(ts.has_node(name) for name in node_names)
+ ts.add_nodes(node_names)
+ assert all(ts.has_node(name) for name in node_names)
+
+ # Try adding/removing facts and ensure the invariants hold.
+ def assert_fact_invariant(ts, fact):
+ if ts.facts[fact]:
+ relevant_keys = list(ts._iter_subfacts(fact))
+ else:
+ relevant_keys = []
+ for key in ts.facts.keys():
+ assert (fact in ts.facts[key]) == (key in relevant_keys)
+
+ fact = (node_names[0], node_names[1], node_names[2])
+ ts.add_fact(fact)
+ for maybe_fact in itertools.permutations(fact):
+ assert_fact_invariant(ts, maybe_fact)
+ for maybe_fact in itertools.permutations(node_names[155:160], 3):
+ assert_fact_invariant(ts, maybe_fact)
+ ts.add_fact(maybe_fact)
+ assert_fact_invariant(ts, maybe_fact)
+ ts.remove_fact(maybe_fact)
+ assert_fact_invariant(ts, maybe_fact)
+
+ # Bulk adding/removing facts.
+ ts.add_facts(list(itertools.permutations(fact)))
+ assert all(ts.lookup(*fact) for fact in itertools.permutations(fact))
+ ts.remove_facts(list(itertools.permutations(fact)))
+ assert not any(ts.lookup(*fact) for fact in itertools.permutations(fact))
+
+main(__name__, __file__)
diff --git a/tests/test_ts_utils.py b/tests/test_ts_utils.py
new file mode 100644
index 0000000..8d9e350
--- /dev/null
+++ b/tests/test_ts_utils.py
@@ -0,0 +1,91 @@
+"""Tests for ts_utils.py"""
+# pylint: disable=pointless-statement,import-error
+from external.bazel_python.pytest_helper import main
+from ts_lib import TripletStructure
+from ts_utils import RegisterRule, RegisterPrototype, AssertNodesEqual
+
+def test_register_rule():
+ """Tests the RegisterRule(...) macro."""
+ ts = TripletStructure()
+ with ts.scope(":Rule"):
+ with ts.scope(":MustMap") as exist:
+ ts[":A"].map({ts[":B"]: ts[":C"]})
+ with ts.scope(":Insert"):
+ ts[":D"].map({exist[":A"]: ts["/:X"]})
+ with ts.scope(":Hello"):
+ ts[":E"].map({exist[":A"]: exist["/:B"]})
+ ts.commit()
+ RegisterRule(ts)
+ # The 3 facts above + 5 below
+ assert len(ts.lookup(None, None, None)) == 8
+ assert ts.lookup("/:Rule:RuleMap:0", "/:Rule:_", "/RULE")
+ assert ts.lookup("/:Rule:RuleMap:0", "/:Rule:MustMap:A", "/MUST_MAP")
+ assert ts.lookup("/:Rule:RuleMap:0", "/:Rule:MustMap:B", "/MUST_MAP")
+ assert ts.lookup("/:Rule:RuleMap:0", "/:Rule:MustMap:C", "/MUST_MAP")
+ assert ts.lookup("/:Rule:RuleMap:0", "/:Rule:Insert:D", "/INSERT")
+ freeze_frame = ts.freeze_frame()
+
+ # Now test custom_qualifiers argument.
+ ts.rollback(0) # Before registering the rule.
+ with ts.scope(":Rule"):
+ RegisterRule(ts, custom_qualifiers=dict({":Hello": "/INSERT"}))
+ delta = ts.freeze_frame() - freeze_frame
+ assert not (delta.add_nodes or delta.remove_nodes or delta.remove_facts)
+ assert (delta.add_facts
+ == set({("/:Rule:RuleMap:0", "/:Rule:Hello:E", "/INSERT")}))
+
+ # Now test auto_assert_equal
+ ts.rollback(0)
+ with ts.scope(":Rule"):
+ with ts.scope(":MustMap") as exist:
+ ts[":A"].map({ts[":B"]: ts[":C"]})
+ with ts.scope(":Insert"):
+ ts[":D"].map({ts[":B"]: ts["/:X"]})
+ RegisterRule(ts, auto_assert_equal=True)
+ delta = ts.freeze_frame() - freeze_frame
+ assert not (delta.remove_nodes or delta.remove_facts)
+ assert (delta.add_nodes
+ == set({"/=", "/:Rule:Insert:B", "/:Rule:Equivalence:0"}))
+ assert (delta.add_facts
+ == set({
+ ("/:Rule:Equivalence:0", "/:Rule:Insert:B", "/="),
+ ("/:Rule:Equivalence:0", "/:Rule:MustMap:B", "/="),
+ ("/:Rule:Equivalence:0", "/:Rule:_", "/RULE"),
+ ("/:Rule:Insert:D", "/:Rule:Insert:B", "/:X"),
+ ("/:Rule:RuleMap:0", "/:Rule:Insert:B", "/INSERT"),
+ }))
+
+def test_register_prototype():
+ """Tests the RegisterPrototype(...) macro."""
+ ts = TripletStructure()
+ with ts.scope(":Rule"):
+ ts[":A"].map({ts[":B"]: ts[":C"]})
+ ts[":D"].map({ts[":A"]: ts["/:X"]})
+ ts.commit()
+ RegisterPrototype(ts, dict({
+ ":RecognizeAIsX": {ts["/INSERT"]: [ts[":D"]]},
+ }), [])
+ # 2 above + 6 below
+ assert len(ts.lookup(None, None, None)) == 8
+ assert ts.lookup("/:Rule:RecognizeAIsX:RuleMap:0",
+ "/:Rule:RecognizeAIsX:_",
+ "/RULE")
+ assert ts.lookup("/:Rule:RecognizeAIsX:RuleMap:0", "/:Rule:D", "/INSERT")
+ assert ts.lookup("/:Rule:RecognizeAIsX:RuleMap:0", "/:Rule:D", "/NO_MAP")
+ assert ts.lookup("/:Rule:RecognizeAIsX:RuleMap:0", "/:Rule:A", "/MUST_MAP")
+ assert ts.lookup("/:Rule:RecognizeAIsX:RuleMap:0", "/:Rule:B", "/MUST_MAP")
+ assert ts.lookup("/:Rule:RecognizeAIsX:RuleMap:0", "/:Rule:C", "/MUST_MAP")
+
+ with ts.scope(":Rule"):
+ AssertNodesEqual(ts, ts[":B, :C"], ":RecognizeAIsX")
+ AssertNodesEqual(ts, ts[":A, :B"], ":RecognizeAIsX",
+ equal_type="/MAYBE=")
+ freeze_frame = ts.freeze_frame()
+ ts.rollback()
+ with ts.scope(":Rule"):
+ RegisterPrototype(ts, dict({
+ ":RecognizeAIsX": {ts["/INSERT"]: [ts[":D"]]},
+ }), [ts[":B, :C"]], [ts[":A, :B"]])
+ assert ts.freeze_frame() == freeze_frame
+
+main(__name__, __file__)
diff --git a/ts_cpp/.lvimrc b/ts_cpp/.lvimrc
new file mode 100644
index 0000000..c9f7e91
--- /dev/null
+++ b/ts_cpp/.lvimrc
@@ -0,0 +1,2 @@
+set shiftwidth=2
+set softtabstop=2
diff --git a/ts_cpp/setup.py b/ts_cpp/setup.py
new file mode 100644
index 0000000..37111f3
--- /dev/null
+++ b/ts_cpp/setup.py
@@ -0,0 +1,19 @@
+"""Setup script for the Triplet structure C++ extensions.
+
+See https://docs.python.org/3/extending/building.html
+"""
+from distutils.core import setup, Extension
+from glob import glob
+import pybind11
+
+TC_CPP_MODULE = Extension("ts_cpp",
+ include_dirs=[pybind11.get_include()],
+ extra_compile_args=["-O3", "-std=c++11"],
+ sources=glob("*.cc"))
+
+setup(name="ts_cpp",
+ version="1.0",
+ description="Optimized triplet structure extension",
+ author="Matthew A. Sotoudeh",
+ author_email="masotoudeh@ucdavis.edu",
+ ext_modules=[TC_CPP_MODULE])
diff --git a/ts_cpp/solver.cc b/ts_cpp/solver.cc
new file mode 100644
index 0000000..7601f50
--- /dev/null
+++ b/ts_cpp/solver.cc
@@ -0,0 +1,194 @@
+#include <tuple>
+#include <string>
+#include "ts_lib.h"
+#include <iostream>
+
+inline int Solver::CurrentVariable() const {
+ return -current_index_;
+}
+
+inline bool Solver::IsVariable(int node) const {
+ return node <= 0;
+}
+
+Solver::Solver(const Structure &structure, const size_t n_variables,
+ const std::vector<Triplet> &constraints,
+ const std::vector<std::set<size_t>> &maybe_equal)
+ : structure_(structure), n_variables_(n_variables), valid_(true),
+ var_to_constraints_(n_variables, std::vector<size_t>({})),
+ may_equal_(maybe_equal), assignment_(n_variables, 0),
+ states_(n_variables, State()), current_index_(0) {
+ assert(n_variables > 0);
+ for (size_t constraint_i = 0;
+ constraint_i < constraints.size();
+ constraint_i++) {
+ auto &constraint = constraints.at(constraint_i);
+ bool any_variables = false;
+ for (size_t i = 0; i < 3; i++) {
+ if (IsVariable(constraint[i])) {
+ var_to_constraints_.at(-constraint[i]).push_back(constraints_.size());
+ any_variables = true;
+ }
+ }
+ if (any_variables) {
+ constraints_.push_back(constraint);
+ } else if (!structure_.IsTrue(constraint)) {
+ valid_ = false;
+ break;
+ }
+ }
+ if (valid_) {
+ working_constraints_ = constraints_;
+ // Initializes states_[0].
+ GetOptions();
+ }
+}
+
+std::vector<Node> Solver::NextAssignment() {
+ if (!valid_ || n_variables_ == 0) {
+ return {};
+ }
+ // current_index_ goes to -1 when we backtrack from the initial state.
+ while (current_index_ >= 0) {
+ auto &state = states_[current_index_];
+
+ // If we have no more options for this variable, backtrack.
+ if (state.options_it == state.options.end()) {
+ UnAssign();
+ continue;
+ }
+
+ // Otherwise, we need to pick a variable assignment and go down.
+ Assign(*state.options_it);
+ // Increment the pointer for the current state so the next time we get back
+ // here we go on to the next one.
+ // TODO(masotoud): we can roll all of this up into a do-it-all Assign()
+ // method.
+ state.options_it++;
+
+ // If this is a valid assignment, return it and backtrack.
+ if (current_index_ == n_variables_) {
+ // TODO(masotoud): we can reorganize this so a copy isn't necessary.
+ std::vector<Node> copy = assignment_;
+ UnAssign();
+ return copy;
+ }
+
+ // Otherwise, initialize the next state.
+ GetOptions();
+ }
+ valid_ = false;
+ return {};
+}
+
+void Solver::Assign(const Node to) {
+ assignment_[current_index_] = to;
+ int var = CurrentVariable();
+ for (auto &i : var_to_constraints_[current_index_]) {
+ for (size_t j = 0; j < 3; j++) {
+ if (working_constraints_[i][j] == var) {
+ working_constraints_[i][j] = to;
+ }
+ }
+ }
+ current_index_++;
+}
+
+void Solver::UnAssign() {
+ // This is usually called when current_index_ in [1, n_variables_], if it's 0
+ // then we're backtracking from the root node (i.e., we're done).
+ current_index_--;
+ if (current_index_ < 0) {
+ return;
+ }
+ int var = CurrentVariable();
+ for (auto &i : var_to_constraints_.at(current_index_)) {
+ for (size_t j = 0; j < 3; j++) {
+ if (constraints_[i][j] == var) {
+ working_constraints_[i][j] = constraints_[i][j];
+ }
+ }
+ }
+}
+
+void Solver::GetOptions() {
+ int var = CurrentVariable();
+ if (current_index_ >= n_variables_ || current_index_ < 0) {
+ return;
+ }
+ // Set to 'true' after the first iteration. We want options to be an
+ // intersection of all the local_options, so we use this to initialize it to
+ // the first local_option. We could also just check options.empty(), as we
+ // break once options goes empty otherwise, but I think this is a bit more
+ // explicit and allows the loop to work even without the break.
+ bool initialized_options = false;
+ std::set<Node> &options = states_.at(current_index_).options;
+ // For each constraint triplet...
+ for (auto &i : var_to_constraints_.at(current_index_)) {
+ // (1) Replace the variable in question with 0. E.g. if we're solving for
+ // -1 and we have constraint (-1, 2, -2), we get (0, 2, 0) as emptied and
+ // hole_is_var = (1, 0, 0).
+ // NOTE: 0 is a variable *AS WELL AS* the indicator for an empty node. This
+ // is actually not ambiguous --- empty nodes are only valid in
+ // Structure::Lookup, within which variables are *in*valid.
+ Triplet emptied(working_constraints_[i]);
+ bool hole_is_var[3];
+ for (size_t j = 0; j < 3; j++) {
+ hole_is_var[j] = (emptied[j] == var);
+ if (IsVariable(emptied[j])) {
+ emptied[j] = 0;
+ }
+ }
+ // (2) Look at all the matching facts and unify them to figure out what the
+ // valid assignments to @var are. Note that we want a running intersection
+ // with @options.
+ std::set<Node> local_options;
+ for (auto &triplet : structure_.Lookup(emptied)) {
+ Node choice = 0;
+ // In theory we can avoid this loop (and hole_is_var)
+ for (size_t j = 0; j < 3; j++) {
+ if (!hole_is_var[j]) {
+ // This hole is not relevant to the assignment of @var. E.g. var =
+ // -1, constraints = (-1, 2, -2), and the fact is (5, 4, 6) --- 6 is
+ // not relevant.
+ continue;
+ } else if (choice == 0) {
+ // This is only the case when @choice is unset.
+ choice = triplet[j];
+ } else if (choice != triplet[j]) {
+ // There's some inconsistency. E.g. if the constraint is (-1, 2, -1)
+ // which gets mapped to emptied (0, 2, 0), which also matches the fact
+ // (5, 2, 7). In that case we reset choice to 0 because 5 != 7. We can
+ // probably avoid dealing with this (and hole_is_var) by expanding the
+ // memory usage of Structure, but I don't think it will be worth it.
+ choice = 0;
+ break;
+ }
+ }
+ // If we actually found a consistent assignment...
+ if (choice > 0) {
+ // We eventually want options &= local_options, so we just make
+ // local_options the intersection immediately.
+ if (!initialized_options || options.count(choice) > 0) {
+ local_options.insert(choice);
+ }
+ }
+ }
+ options = std::move(local_options);
+ initialized_options = true;
+ if (options.empty()) {
+ break;
+ }
+ }
+ // (3) Check that we're not (incorrectly) re-assigning the same node to
+ // different variables.
+ std::set<size_t> &may_equal = may_equal_[current_index_];
+ for (size_t i = 0; i < current_index_; i++) {
+ if (options.count(assignment_[i]) > 0 && may_equal.count(i) == 0) {
+ // We're saying it's OK to assign it to V, but already i->V and we may
+ // not equal i.
+ options.erase(assignment_[i]);
+ }
+ }
+ states_[current_index_].options_it = options.begin();
+}
diff --git a/ts_cpp/structure.cc b/ts_cpp/structure.cc
new file mode 100644
index 0000000..6c3e586
--- /dev/null
+++ b/ts_cpp/structure.cc
@@ -0,0 +1,68 @@
+#include <tuple>
+#include <iostream>
+#include <algorithm>
+#include <vector>
+#include <string>
+#include "ts_lib.h"
+
+void Structure::AddFact(const Triplet &fact) {
+ assert(!IsTrue(fact));
+ Triplet key(fact);
+ for (uint8_t i = 0; i < 8; i++) {
+ for (uint8_t j = 0; j < 3; j++) {
+ if ((i >> j) & 0b1) {
+ key[j] = fact[j];
+ } else {
+ key[j] = Node(0);
+ }
+ }
+ facts_[key].push_back(fact);
+ }
+}
+
+void Structure::RemoveFact(const Triplet &fact) {
+ assert(IsTrue(fact));
+ Triplet key(fact);
+ for (uint8_t i = 0; i < 8; i++) {
+ for (uint8_t j = 0; j < 3; j++) {
+ if ((i >> j) & 0b1) {
+ key[j] = fact[j];
+ } else {
+ key[j] = Node(0);
+ }
+ }
+ auto it = std::find(facts_[key].begin(), facts_[key].end(), fact);
+ assert(it != facts_[key].end());
+ facts_[key].erase(it);
+ }
+}
+
+void Structure::AddFactPy(Node i, Node j, Node k) {
+ AddFact(Triplet(i, j, k));
+}
+
+void Structure::RemoveFactPy(Node i, Node j, Node k) {
+ RemoveFact(Triplet(i, j, k));
+}
+
+const std::vector<Triplet> &Structure::Lookup(const Triplet &fact) const {
+ try {
+ return facts_.at(fact);
+ } catch (std::out_of_range &) {
+ // TODO(masotoud): Maybe not?
+ return empty_;
+ }
+}
+
+bool Structure::AllTrue(const std::vector<Triplet> &facts) const {
+ for (auto &fact : facts) {
+ if (!IsTrue(fact)) {
+ return false;
+ }
+ }
+ return true;
+}
+
+bool Structure::IsTrue(const Triplet &fact) const {
+ return facts_.count(fact) > 0 && !facts_.at(fact).empty();
+}
diff --git a/ts_cpp/ts_lib.cc b/ts_cpp/ts_lib.cc
new file mode 100644
index 0000000..cbe0a19
--- /dev/null
+++ b/ts_cpp/ts_lib.cc
@@ -0,0 +1,26 @@
+#include <pybind11/pybind11.h>
+#include <pybind11/stl.h>
+#include "ts_lib.h"
+
+namespace py = pybind11;
+
+PYBIND11_MODULE(ts_cpp, m) {
+ py::class_<Triplet>(m, "Triplet")
+ .def(py::init<Node, Node, Node>());
+
+ py::class_<Structure>(m, "Structure")
+ .def(py::init<>())
+ .def("addFact", &Structure::AddFactPy)
+ .def("removeFact", &Structure::RemoveFactPy)
+ .def("lookup", &Structure::Lookup);
+
+ py::class_<Solver>(m, "Solver")
+ .def(py::init<
+ const Structure&,
+ const size_t,
+ const std::vector<Triplet>&,
+ const std::vector<std::set<size_t>>
+ >())
+ .def("isValid", &Solver::IsValid)
+ .def("nextAssignment", &Solver::NextAssignment);
+}
diff --git a/ts_cpp/ts_lib.h b/ts_cpp/ts_lib.h
new file mode 100644
index 0000000..68fe7be
--- /dev/null
+++ b/ts_cpp/ts_lib.h
@@ -0,0 +1,96 @@
+#ifndef TS_LIB_H_
+#define TS_LIB_H_
+
+#include <array>
+#include <string>
+#include <unordered_map>
+#include <unordered_set>
+#include <set>
+#include <stack>
+#include <vector>
+#include <cassert>
+
+// Nodes are > 0. Variables are <= 0. Where Nodes are expected, an 'empty' node
+// is represented by 0.
+
+#define Node int
+#define Variable int
+#define NodeOrVariable int
+
+class Triplet : public std::array<Node, 3> {
+ public:
+ Triplet(Node i, Node j, Node k) : std::array<Node, 3>({i, j, k}) {}
+};
+
+// https://en.cppreference.com/w/cpp/utility/hash
+namespace std {
+template<> struct hash<Triplet> {
+ std::size_t operator()(Triplet const& triplet) const noexcept {
+ std::size_t h1 = std::hash<int>{}(triplet[0]);
+ std::size_t h2 = std::hash<int>{}(triplet[1]);
+ std::size_t h3 = std::hash<int>{}(triplet[2]);
+ // TODO(masotoud): maybe profile with other combinations.
+ return h1 ^ (h2 << 1) ^ (h3 >> 1);
+ }
+};
+} // namespace std
+
+class Structure {
+ public:
+ void AddFact(const Triplet &fact);
+ void RemoveFact(const Triplet &fact);
+ void AddFactPy(Node i, Node j, Node k);
+ void RemoveFactPy(Node i, Node j, Node k);
+ const std::vector<Triplet> &Lookup(const Triplet &fact) const;
+ bool AllTrue(const std::vector<Triplet> &facts) const;
+ bool IsTrue(const Triplet &fact) const;
+
+ private:
+ std::unordered_map<Triplet, std::vector<Triplet>> facts_;
+ // TODO(masotoud)
+ std::vector<Triplet> empty_;
+};
+
+class Solver {
+ public:
+ Solver(const Structure &structure,
+ const size_t n_variables,
+ const std::vector<Triplet> &constraints,
+ const std::vector<std::set<size_t>> &maybe_equal);
+
+ bool IsValid() { return valid_; }
+ std::vector<Node> NextAssignment();
+ void Assign(const Node to);
+ void UnAssign();
+ void GetOptions();
+
+ private:
+ int CurrentVariable() const;
+ bool IsVariable(int node) const;
+
+ struct State {
+ State() : options(), options_it(options.begin()) { }
+ std::set<Node> options;
+ std::set<Node>::iterator options_it;
+ };
+
+ const Structure &structure_;
+ const size_t n_variables_;
+ bool valid_;
+ std::vector<Triplet> constraints_;
+ std::vector<Triplet> working_constraints_;
+ // Size: n_variables
+ std::vector<std::vector<size_t>> var_to_constraints_;
+ // Size: n_variables
+ std::vector<std::set<size_t>> may_equal_;
+ // Size: n_variables
+ std::vector<Node> assignment_;
+ // Size: n_variables
+ std::vector<State> states_;
+ // Range: [0, infty)
+ // NOTE: This is the negation of the current variable. Makes it more
+ // convenient for indexing into assignment_, states_, etc.
+ int current_index_ = 0;
+};
+
+#endif // TS_LIB_H_
diff --git a/ts_lib.py b/ts_lib.py
new file mode 100644
index 0000000..4415d10
--- /dev/null
+++ b/ts_lib.py
@@ -0,0 +1,590 @@
+"""Core library for describing triplet-structures in Python."""
+import itertools
+from collections import defaultdict
+
class TripletStructure:
    """Represents a triplet structure. Instances are usually named 'ts'.

    A TripletStructure starts out empty, with no nodes and no facts. It can be
    modified using the syntax: `ts["/:A"].map({ts["/:B"]: ts["/:C"]})` which
    adds the fact `(/:A,/:B,/:C)` to the structure. By default, nodes are added
    automatically upon first reference.

    We want to be able to easily roll-back changes to the TripletStructure. Every
    direct modification of the TripletStructure is automatically registered in the
    TSDelta instance @ts.buffer. This acts as a buffer of changes. The method
    @ts.commit(...) will commit this buffer, i.e., save it to the end of the
    list @ts.path and replace @ts.buffer with a fresh TSDelta instance. You can
    always re-construct the structure by applying the TSDeltas in @ts.path
    successively to an empty structure, then applying @ts.buffer. When
    @ts.buffer is empty, we say the structure is 'clean.' @ts.rollback(...) can
    be used to restore the state of the structure to a particular commit in
    @ts.path.

    Generally, every change to a triplet structure is owned by some TSDelta
    instance. @ts.path gives a list [None, delta_1, delta_2, ..., delta_n] of
    TSDeltas.
    """
    def __init__(self):
        """Initializes a new, empty triplet structure."""
        # A list of the names of all nodes in the structure.
        self.nodes = []
        # Maps full_name -> short_name. The short name will be used in
        # user-facing printouts. We should maintain the invariant
        # self.display_names.keys() == self.nodes.
        self.display_names = dict()
        # self.facts is a pre-computed index of the facts in the structure.
        # Keys are of two types:
        # 1. Triplet keys with 'holes' represented by None. E.g.,
        #    self.facts[(None, x, None)] is a list of all facts containing `x`
        #    in the middle slot. To get a list of all facts, use
        #    self.facts[(None, None, None)].
        # 2. Single-node keys. For a node string @x, self.facts[x] is all facts
        #    with x in at least one slot (see facts_about_node(...)).
        # Notably, if a fact (A, B, C) is in the structure at all, then it
        # *MUST* belong to exactly the 11 keys returned by
        # self._iter_subfacts((A,B,C)).
        self.facts = defaultdict(list)
        # A prefix applied to node lookups. See ts.scope(...) and
        # ts.__getitem__.
        self.current_scope = "/"
        # The historical (committed) deltas and the running buffer of
        # uncommitted changes.
        self.path = [None]
        self.buffer = TSDelta(self)
        # (Optional) an object with [add,remove]_[node,fact] methods which will
        # shadow changes to the structure. Used to implement efficient solving
        # with the C++ extensions.
        self.shadow = None

    def __getitem__(self, node):
        """Returns a (list of) NodeWrapper(s) corresponding to @node.

        This is the main entrypoint to manipulation of the structure.

        @node should be a string containing either (i) the name of a node, or
        (ii) a comma-separated list of node names. Node names should not
        contain spaces or commas.

        If a node name ends in ":??", then the "??" will be replaced with the
        smallest number such that the resulting node name does not yet exist in
        the structure. Its use is somewhat analogous to LISP's gensym. See
        ts_utils.py for example usage.

        NOTE: ts[...] *CAN HAVE SIDE-EFFECTS*, namely _it constructs nodes
        which don't already exist_. You may think of it as a Python
        defaultdict. This makes for simpler code, but has a slight drawback of
        making typos harder to catch. We may decide to change this syntax in
        the future, to something like ts.node(name) or tc(name), but: (i) the
        former would make quickly understanding 'tc-dense' code (like
        mapper.py) difficult while (ii) the latter loses intuition.
        """
        if "," in node:
            return [self[subname.strip()] for subname in node.split(",")]
        full_name = self._full_name(node)
        if full_name.endswith(":??"):
            # Gensym: find the first unused numeric suffix.
            for i in itertools.count():
                filled_name = "{}:{}".format(full_name[:-3], i)
                if filled_name not in self.nodes:
                    full_name = filled_name
                    break
        # No-op if the node already exists.
        self.add_node(full_name)
        return NodeWrapper(self, full_name)

    def lookup(self, *template, read_direct=False):
        """Returns all facts according to a given template.

        This method should be called like ts.lookup(A,B,C) where A, B, C can be
        either node names or Nones. Nones match against any node name.

        Setting @read_direct=True returns a reference to the corresponding list
        of facts stored on the Structure instance. _May_ sometimes improve
        performance, but in general should be avoided due to unexpected
        behavior when either this class or the returned list is modified.
        """
        if not read_direct:
            return self.lookup(*template, read_direct=True).copy()
        return self.facts[template]

    def facts_about_node(self, full_name, read_direct=False):
        """Returns all facts involving the node with name @full_name.

        See self.lookup for notes about @read_direct.
        """
        if not read_direct:
            return self.facts_about_node(full_name, read_direct=True).copy()
        return self.facts[full_name]

    def scope(self, scope="", protect=False):
        """Returns a TSScope representing the given scope.

        Often used like with ts.scope(...): ... to automatically prefix node
        names, e.g., to prevent name collisions.
        """
        return TSScope(self, self._full_name(scope), protect)

    def is_clean(self):
        """True iff the current buffer is empty."""
        return not self.buffer

    def commit(self, commit_if_clean=True):
        """Commits self.buffer to self.path.

        Returns the committed TSDelta, or False when nothing was committed
        (only possible if @commit_if_clean is False and the buffer is empty).
        """
        if self.is_clean() and not commit_if_clean:
            return False
        self.path.append(self.buffer)
        self.buffer = TSDelta(self)
        return self.path[-1]

    def rollback(self, to_time=0):
        """Restores the structure to a previously-committed state.

        to_time = 0 means roll back only the current buffer.
        to_time > 0 means roll back so that len(path) == to_time.
        to_time < 0 means roll back so that len(path) shrinks by |to_time|,
            i.e. the last |to_time| commits are undone.
        NOTE: len(path) here never includes the buffer.
        NOTE: len(path) == 0 is invalid, as path[0] = None (the 'root delta').
        """
        old_running = self.buffer
        # Swap in a fresh buffer first: TSDelta.rollback asserts that the
        # delta being rolled back is not the active buffer.
        self.buffer = TSDelta(self)
        old_running.rollback()
        # Undoing old_running recorded inverse changes into the fresh buffer;
        # discard them so the structure ends up clean.
        self.buffer = TSDelta(self)
        if to_time == 0:
            return

        if to_time >= 0:
            target_length = to_time
        else:
            target_length = len(self.path) + to_time
        assert len(self.path) >= target_length > 0

        while len(self.path) > target_length:
            self.path.pop().rollback()
        # buffer will have a bunch of changes which aren't needed. In
        # theory we can 'disable' the TSDelta instead of just overwriting it
        # here, which might improve performance for some such operations.
        self._force_clean()

    def start_recording(self):
        """Returns a new TSRecording to track changes to @self."""
        return TSRecording(self)

    def freeze_frame(self):
        """Returns a new TSFreezeFrame saving the state of the structure."""
        return TSFreezeFrame(self)

    def has_node(self, full_name):
        """True iff @full_name is a registered node in the structure."""
        assert isinstance(full_name, str)
        return full_name in self.nodes

    def add_node(self, full_name, display_name=None):
        """Low-level method to add a node to the structure.

        No-op when the node already exists. The change is recorded on
        self.buffer and mirrored to self.shadow (if any).
        """
        if not self.has_node(full_name):
            self.nodes.append(full_name)
            self.display_names[full_name] = display_name or full_name
            self.buffer.add_node(full_name)
            if self.shadow:
                self.shadow.add_node(full_name)

    def remove_node(self, full_name):
        """Low-level method to remove a node from the structure.

        The node must not participate in any remaining fact.
        """
        assert not self.facts_about_node(full_name, True), \
            f"Remove facts using {full_name} before removing it."
        if full_name in self.nodes:
            self.nodes.remove(full_name)
            self.display_names.pop(full_name)
            self.buffer.remove_node(full_name)
            if self.shadow:
                self.shadow.remove_node(full_name)

    def add_fact(self, fact):
        """Low-level method to add a fact to the structure.

        No-op when the fact is already present; all nodes of @fact must
        already exist.
        """
        if self.lookup(*fact, read_direct=True):
            # The fact already exists in the structure.
            return
        assert all(map(self.has_node, fact)), \
            f"Add all nodes in {fact} before adding the fact."
        # Register the fact under every index key it belongs to.
        for key in self._iter_subfacts(fact):
            self.facts[key].append(fact)
        self.buffer.add_fact(fact)
        if self.shadow:
            self.shadow.add_fact(fact)

    def remove_fact(self, fact):
        """Remove a fact from the structure (no-op if absent)."""
        if not self.lookup(*fact, read_direct=True):
            # Fact was already removed, or never added.
            return
        for key in self._iter_subfacts(fact):
            self.facts[key].remove(fact)
        self.buffer.remove_fact(fact)
        if self.shadow:
            self.shadow.remove_fact(fact)

    def add_nodes(self, nodes):
        """Helper to add multiple nodes to the structure."""
        for node in nodes:
            self.add_node(node)

    def remove_nodes(self, nodes):
        """Helper to remove multiple nodes from the structure."""
        for node in nodes:
            self.remove_node(node)

    def add_facts(self, facts):
        """Helper to add multiple facts to the structure."""
        for fact in facts:
            self.add_fact(fact)

    def remove_facts(self, facts):
        """Helper to remove multiple facts from the structure."""
        for fact in facts:
            self.remove_fact(fact)

    def print_delta(self):
        """Helper context manager that prints changes made within its body.

        Usage: `with ts.print_delta(): ...` -- on exit, prints the TSDelta
        between the structure's state at entry and at exit.
        """
        class DeltaPrinter:
            """Helper context manager for printing changes to a structure."""
            def __init__(self, ts):
                self.ts = ts
                # Snapshot taken on __enter__.
                self.frame = None

            def __enter__(self):
                self.frame = self.ts.freeze_frame()

            def __exit__(self, t, v, tb):
                # (new frame) - (old frame) = delta from old to new.
                print(self.ts.freeze_frame() - self.frame)
        return DeltaPrinter(self)

    @staticmethod
    def _iter_subfacts(fact):
        """Yields all keys of self.facts which should hold @fact.

        This method *MUST* be used any time ts.facts is modified. For examples,
        see ts.add_fact, ts.remove_fact.
        """
        # All 8 hole-patterns of the triplet (bit i set => keep slot i).
        for subset in range(2**3):
            yield tuple(arg if (subset & (0b1 << i)) else None
                        for i, arg in enumerate(fact))
        # Plus one single-node key per distinct node in the fact.
        for argument in sorted(set(fact)):
            yield argument

    def _full_name(self, name):
        """Returns the full name of a node relative to the current scope."""
        if name.startswith("/"):
            # Already absolute.
            return name
        return "{}{}".format(self.current_scope, name)

    def _force_clean(self):
        """Manually clears the buffer.

        NOTE: Code outside of this file should **NEVER** call _force_clean.
        """
        self.buffer = TSDelta(self)

    def __str__(self):
        """Returns a string representation of the Structure.

        WARNING: This representation basically prints all the facts; it can get
        quite long, especially with a lot of rules.
        """
        def _format_fact(fact):
            return str(tuple(map(self.display_names.get, fact)))
        return "TripletStructure ({id}):\n\t{facts}".format(
            id=id(self), facts="\n\t".join(
                map(_format_fact, self.lookup(None, None, None))))
+
class TSScope:
    """A node-name prefix ("scope") within a particular structure.

    Usually used as a context manager (``with ts.scope("..."): ...``) so
    node lookups are automatically prefixed, e.g. to avoid name collisions.
    Also offers direct helpers, such as iterating all nodes in the scope.
    """
    def __init__(self, structure, prefix, protect=False):
        """Initializes a new TSScope.

        This should usually only be called via ts.scope(...) or
        scope.scope(...).
        """
        self.structure = structure
        self.prefix = prefix
        # Prefixes that were active on the structure before each __enter__,
        # so nested/re-entrant use restores the right one on __exit__.
        self.old_scope_stack = []
        self.protect = protect

    def __enter__(self):
        """Makes self.prefix the structure's default node prefix.

        Returns the TSScope instance for convenience.
        """
        assert not self.protect
        self.old_scope_stack.append(self.structure.current_scope)
        self.structure.current_scope = self.prefix
        return self

    def __exit__(self, type_, value, traceback):
        """Restores the structure's previous default prefix."""
        assert not self.protect
        self.structure.current_scope = self.old_scope_stack.pop()

    def __getitem__(self, index):
        """Looks up @index relative to self, ignoring the current prefix."""
        if self.protect:
            # Protected scopes hand back plain name strings and never
            # create nodes as a side effect.
            return f"{self.prefix}{index}"
        with self:
            return self.structure[index]

    def scope(self, scope):
        """Returns a sub-scope of self, ignoring the structure's prefix."""
        with self:
            return self.structure.scope(scope, self.protect)

    def protected(self):
        """Returns a protected version of this scope.

        In a protected scope, doing scope[name] will return the full node
        name as a string instead of a NodeWrapper, and will *NOT* add the
        node if it does not exist.
        """
        return self.structure.scope(self.prefix, True)

    def __iter__(self):
        """Yields a NodeWrapper for every node of the structure in scope."""
        marker = self.prefix + ":"
        for name in self.structure.nodes:
            if name.startswith(marker):
                yield self.structure[name]

    def __contains__(self, node):
        """True iff @node (NodeWrapper or full name) lies in this scope."""
        if isinstance(node, NodeWrapper):
            assert node.structure == self.structure
            node = node.full_name
        return node.startswith(self.prefix + ":")

    def __len__(self):
        """Returns how many of the structure's nodes lie in this scope."""
        marker = self.prefix + ":"
        return sum(1 for name in self.structure.nodes
                   if name.startswith(marker))
+
+class NodeWrapper:
+ """Represents a single node in a given structure."""
+ def __init__(self, structure, full_name):
+ """Initialize the NodeWrapper."""
+ self.structure = structure
+ self.full_name = full_name
+
+ def map(self, mappings):
+ """Helper for adding facts to the structure.
+
+ node.map({A: B, C: D}) adds (node, A, B) and (node, C, D).
+
+ NOTE: Be wary of repeated keys!
+ """
+ def to_fact(value_node, key_node):
+ return (self.full_name, value_node.full_name, key_node.full_name)
+ facts = []
+ for value, key in mappings.items():
+ if isinstance(key, NodeWrapper):
+ facts.append(to_fact(value, key))
+ else:
+ # Allow sets of keys
+ facts.extend(to_fact(value, sub_key) for sub_key in key)
+
+ # We sort here to ensure it's deterministic.
+ self.structure.add_facts(sorted(facts))
+
+ def scoped_name(self, scope):
+ """Returns string @x such that @scope[@x] = @self.
+
+ This is the "first name" where @scope is the "last name." Used, for
+ example, by ts_utils to find rules that should be marked /= or /MAYBE=
+ in rules based on their name.
+ """
+ if not self.full_name.startswith(scope.prefix):
+ return self.full_name
+ return self.full_name[len(scope.prefix):]
+
+ def __sub__(self, scope):
+ """Syntactic sugar for scoped_name(...)."""
+ return self.scoped_name(scope)
+
+ def remove_with_facts(self):
+ """Removes the node and all associated facts from the structure."""
+ self.structure.remove_facts(
+ self.structure.facts_about_node(self.full_name))
+ self.structure.remove_node(self.full_name)
+
+ def remove(self):
+ """Removes the node (without associated facts) from the structure.
+
+ This is equivalent to assert not facts_about_node; remove_with_facts().
+ It should be used when there is an invariant that no related facts
+ should exist in the structure. See runtime/assignment.py for an
+ example.
+ """
+ self.structure.remove_node(self.full_name)
+
+ def display_name(self, set_to=None):
+ """Gets or sets the display name of the node."""
+ if set_to is not None:
+ self.structure.display_names[self.full_name] = set_to
+ return self.structure.display_names[self.full_name]
+
+ def __eq__(self, other):
+ """True iff @self and @other refer to the same node."""
+ return ((self.structure, self.full_name) ==
+ (other.structure, other.full_name))
+
+ def __hash__(self):
+ """Hash based on the structure and name of the node."""
+ return hash((self.structure, self.full_name))
+
+ def __lt__(self, other):
+ """Lexicographical comparison for sorting."""
+ return ((id(self.structure), self.full_name)
+ < (id(other.structure), other.full_name))
+
+ def __str__(self):
+ """Returns the name of the node."""
+ return self.full_name
+
class TSDelta:
    """Represents the change between two TripletStructures."""
    def __init__(self, ts):
        """Initialize an empty TSDelta attached to structure @ts."""
        self.ts = ts
        self.add_nodes = set()
        self.add_facts = set()
        self.remove_nodes = set()
        self.remove_facts = set()

    def apply(self):
        """Apply the TSDelta to self.ts."""
        assert self is not self.ts.buffer
        assert self.ts.is_clean()
        ts = self.ts
        # NOTE: sorted(...) below is only for determinism.
        ts.add_nodes(sorted(self.add_nodes))
        ts.add_facts(sorted(self.add_facts))
        ts.remove_facts(sorted(self.remove_facts))
        ts.remove_nodes(sorted(self.remove_nodes))
        ts._force_clean()
        # TODO: maybe this should just wrap it?
        ts.path.append(self)

    def rollback(self):
        """Undo the TSDelta (apply its inverse to self.ts)."""
        assert self is not self.ts.buffer
        ts = self.ts
        # NOTE: sorted(...) only for determinism. Facts are removed before
        # the nodes they mention (remove_node requires a fact-free node).
        ts.remove_facts(sorted(self.add_facts))
        ts.remove_nodes(sorted(self.add_nodes))
        ts.add_nodes(sorted(self.remove_nodes))
        ts.add_facts(sorted(self.remove_facts))
        # Maybe we should assert that this is at the end of the path and remove
        # it?

    def add_node(self, full_name):
        """Record the addition of a new node."""
        self.add_nodes.add(full_name)

    def add_fact(self, fact):
        """Record the addition of a new fact."""
        self.add_facts.add(fact)

    def remove_node(self, full_name):
        """Record the removal of an existing node."""
        if full_name in self.add_nodes:
            # Adding then removing within one delta cancels out.
            self.add_nodes.discard(full_name)
        else:
            self.remove_nodes.add(full_name)

    def remove_fact(self, fact):
        """Record the removal of an existing fact."""
        if fact in self.add_facts:
            # Adding then removing within one delta cancels out.
            self.add_facts.discard(fact)
        else:
            self.remove_facts.add(fact)

    def __bool__(self):
        """True iff the TSDelta is not a no-op."""
        return any((self.add_nodes, self.add_facts,
                    self.remove_nodes, self.remove_facts))

    def __str__(self):
        """Human-readable format of the TSDelta. """
        def _pretty(entries):
            as_list = sorted(entries)
            if as_list and isinstance(as_list[0], tuple):
                # Facts: show display names (when known) instead of full
                # names. Node sets are printed in raw iteration order.
                display = self.ts.display_names
                entries = [tuple(display.get(str(x), str(x)) for x in entry)
                           for entry in as_list]
            return "\n\t\t" + "\n\t\t".join(map(str, entries))
        return (f"TSDelta ({id(self)}):"
                f"\n\t- Nodes: {_pretty(self.remove_nodes)}"
                f"\n\t+ Nodes: {_pretty(self.add_nodes)}"
                f"\n\t- Facts: {_pretty(self.remove_facts)}"
                f"\n\t+ Facts: {_pretty(self.add_facts)}")
+
class TSRecording:
    """Helper class representing all TSDeltas applied after some checkpoint."""
    def __init__(self, ts):
        """Initialize the TSRecording; @ts must be clean (empty buffer)."""
        self.ts = ts
        self.start_path = self.ts.path.copy()
        assert self.ts.is_clean()

    def _assert_unrewound(self):
        """Checks that history up to the checkpoint is untouched and clean."""
        checkpoint = len(self.start_path)
        assert (self.ts.path[:checkpoint] == self.start_path
                and self.ts.is_clean())

    def commits(self, rollback=False):
        """TSDeltas applied to the structure since @self was initialized.

        If rollback=True, it will also roll back the state of the structure
        to when @self was initialized.
        """
        self._assert_unrewound()
        deltas = self.ts.path[len(self.start_path):]
        if rollback:
            self.rollback()
        return deltas

    def rollback(self):
        """Rolls the structure back to the state at initialization time."""
        self._assert_unrewound()
        self.ts.rollback(len(self.start_path))
+
class TSFreezeFrame:
    """Represents an immutable snapshot of a TripletStructure's state."""
    def __init__(self, ts):
        """Initialize the TSFreezeFrame by copying @ts's nodes and facts."""
        self.ts = ts
        self.nodes = set(ts.nodes)
        self.facts = set(ts.lookup(None, None, None))

    def delta_to_reach(self, desired, nodes=True, facts=True):
        """Return a TSDelta, applying which transforms @self to @desired.

        @nodes and @facts control whether node and/or fact differences are
        included in the returned delta.
        """
        delta = TSDelta(self.ts)
        if nodes:
            delta.add_nodes = desired.nodes - self.nodes
            delta.remove_nodes = self.nodes - desired.nodes
        if facts:
            delta.add_facts = desired.facts - self.facts
            delta.remove_facts = self.facts - desired.facts
        return delta

    def __sub__(self, other):
        """Syntactic sugar: (new_frame - old_frame) is the old->new delta.

        Returns NotImplemented for non-TSFreezeFrame operands.
        """
        if not isinstance(other, TSFreezeFrame):
            return NotImplemented
        return other.delta_to_reach(self)

    def __eq__(self, other):
        """True iff @self and @other snapshot the same structure state.

        Returns NotImplemented for non-TSFreezeFrame operands so Python can
        fall back to its default comparison instead of raising
        AttributeError.
        """
        if not isinstance(other, TSFreezeFrame):
            return NotImplemented
        return (self.ts == other.ts
                and self.nodes == other.nodes
                and self.facts == other.facts)
diff --git a/ts_utils.py b/ts_utils.py
new file mode 100644
index 0000000..8db02e5
--- /dev/null
+++ b/ts_utils.py
@@ -0,0 +1,136 @@
+"""Macros which help building Triplet Structures.
+
+Most of them are focused on automating the construction of production rules.
+"""
+from collections import defaultdict
+
def RegisterRule(
        ts, rule_name="", custom_qualifiers=None, auto_assert_equal=False):
    """A 'low-level' macro for building a rule based on node names.

    We assume that all nodes in the rule are within scopes:
        :[MapType]:[ActionType]
            - [ActionType] is applied to the node.
        :[MapType]
            - The node, if it exists, is left alone and anything done to it is
              added.
        :Insert
            - The node (and/or associated facts) are inserted.

    @custom_qualifiers can be used to extend the default list of scopes
    recognized (an entry mapped to None disables that qualifier). This is
    particularly useful when using the same set of nodes in multiple
    different rules (playing distinct roles in each).

    If @auto_assert_equal is True, qualified nodes sharing the same final
    name component are additionally constrained to share an assignment (see
    AssertNodesEqual).
    """
    # Maps a substring of a node name to the action node to attach to it.
    qualifiers = dict({
        ":MustMap:": "/MUST_MAP",
        ":TryMap:": "/TRY_MAP",
        ":NoMap:": "/NO_MAP",
        ":NoMap1:": "/NO_MAP1",
        ":NoMap2:": "/NO_MAP2",
        ":NoMap3:": "/NO_MAP3",
        ":NoMap4:": "/NO_MAP4",
        ":NoMap5:": "/NO_MAP5",
        ":Remove:": "/REMOVE",
        ":Subtract:": "/SUBTRACT",
        ":Insert:": "/INSERT",
        ":OrInsert:": "/INSERT",
    })
    qualifiers.update(custom_qualifiers or dict())
    # Drop qualifiers that @custom_qualifiers explicitly disabled (None).
    qualifiers = dict({key: value for key, value in qualifiers.items()
                       if value is not None})
    # Sorted for deterministic iteration order below.
    qualifiers_sorted = sorted(qualifiers.keys())

    # Groups nodes by the last ':'-separated component of their name.
    equivalence_classes = defaultdict(set)

    node_scope = ts.scope()
    with ts.scope(rule_name) as rule_scope:
        mapnode = ts[":RuleMap:??"]
        for node in node_scope:
            # Node name relative to the ambient scope.
            name = node - node_scope
            for qualifier in qualifiers_sorted:
                if qualifier in name:
                    mapnode.map({
                        rule_scope[":_"]: ts["/RULE"],
                        node: ts[qualifiers[qualifier]],
                    })
                    first_name = name.split(":")[-1]
                    equivalence_classes[first_name].add(node)

    if auto_assert_equal:
        handled = set()
        for equivalence_class in equivalence_classes.values():
            if equivalence_class <= handled:
                # TODO(masotoud): is this reachable?
                continue
            handled.update(equivalence_class)
            if len(equivalence_class) == 1:
                # A singleton class constrains nothing.
                continue
            # Sorted (by name) for determinism.
            AssertNodesEqual(ts, sorted(equivalence_class, key=str), rule_name)
+
def RegisterPrototype(ts, rules, equal, maybe_equal=None):
    """A higher-level macro for describing "simpler" rules.

    Essentially, Prototypes are rules where you map against some subset of the
    structure, then the other subset is inserted.

    The actual semantics of RegisterPrototype get a bit hairy, because a lot of
    things are assumed implicitly. For example, things you mark as /INSERT but
    not /TRY_MAP are implicitly declared /NO_MAP as well. TODO(masotoud):
    better document and simplify such behavior.

    Arguments
    =========
    - @rules should be a dictionary {name: rule}, where "rule" is itself a
      dictionary mapping an action node (e.g. ts["/INSERT"] or
      ts["/MUST_MAP"]) to the list of nodes that action applies to. A key of
      None means "leave these nodes out of the rule entirely." Nodes in the
      current scope not listed under any action are implicitly assigned the
      "other" of the two main roles: /INSERT if the rule has no /INSERT
      entry, otherwise /MUST_MAP if it has no /MUST_MAP entry.
    - @equal is a list of tuples of nodes which should share an assignment.
    - @maybe_equal is an optional list of tuples of nodes which may share an
      assignment (marked /MAYBE= instead of /=).

    NOTE: the rule dictionaries in @rules may be mutated in place (an
    implied /NO_MAP entry is added).
    """
    scope = ts.scope()
    all_nodes = list(scope)
    maybe_equal = maybe_equal or []
    # Sorted by rule name for deterministic registration order.
    for rule_name, rule in sorted(rules.items(), key=lambda x: x[0]):
        rule_scope = ts.scope(rule_name)
        map_node = rule_scope[":RuleMap:??"]
        map_node.map({rule_scope[":_"]: ts["/RULE"]})

        if ts["/INSERT"] in rule.keys():
            assert ts["/NO_MAP"] not in rule
            # Inserted nodes are implicitly /NO_MAP unless explicitly marked
            # /TRY_MAP. NOTE: this mutates the caller's rule dict.
            if ts["/TRY_MAP"] in rule.keys():
                rule[ts["/NO_MAP"]] = [node for node in rule[ts["/INSERT"]]
                                       if node not in rule[ts["/TRY_MAP"]]]
            else:
                rule[ts["/NO_MAP"]] = rule[ts["/INSERT"]]

        remaining_nodes = set(all_nodes.copy())
        for node_type, nodes_of_type in rule.items():
            remaining_nodes = remaining_nodes - set(nodes_of_type)
            # A None action means "skip these nodes."
            if node_type is not None:
                map_node.map({node: node_type for node in nodes_of_type})

        # Unmentioned nodes play whichever of /INSERT//MUST_MAP is unused.
        if ts["/INSERT"] not in rule:
            map_remaining_to = ts["/INSERT"]
        elif ts["/MUST_MAP"] not in rule:
            map_remaining_to = ts["/MUST_MAP"]
        else:
            map_remaining_to = None

        if map_remaining_to is not None:
            # Sorted for determinism.
            remaining_nodes = sorted(remaining_nodes)
            map_node.map({node: map_remaining_to for node in remaining_nodes})

        for node_set in equal:
            AssertNodesEqual(ts, node_set, rule_name)

        for node_set in maybe_equal:
            AssertNodesEqual(ts, node_set, rule_name, equal_type="/MAYBE=")
+
def AssertNodesEqual(ts, nodes, rule_scope, equal_type="/="):
    """Enforces that a set of nodes share an assignment in the rule.

    TODO(masotoud): use an actual rule_scope.
    """
    equivalence = ts[":Equivalence:??"]
    equivalence.map({ts.scope(rule_scope)[":_"]: ts["/RULE"]})
    marker = ts[equal_type]
    for node in nodes:
        equivalence.map({node: marker})
generated by cgit on debian on lair
contact matthew@masot.net with questions or feedback