Examining the categorical formulation of simple substitutions is useful for understanding more complex topics.

Simple substitutions form a category.

The objects are contexts: ordered lists of sorts, represented by lists of variables like [smt.Int("x"), smt.Real("y"), ...].

Substitutions compose and have an identity in a natural way.

We give the variables names to reference them easily, though everything is really alpha equivalent with respect to those names. The morphisms are lists of terms ts, expressions like [cos(x), sin(cos(x))] built out of the domain variables. They kind of match a naive notion of “function expression”.

You can apply these things to a list of terms and they’ll inject those terms into the variable positions.

from dataclasses import dataclass
from kdrag.all import *
@dataclass
class Subst():
    dom : list[smt.ExprRef] # a list of variables
    cod : list[smt.ExprRef] # kind of redundant, but useful for seeing the connection to telescopes
    ts : list[smt.ExprRef] # same length and types as cod, with vars from dom
    def __post_init__(self):
        assert len(self.cod) == len(self.ts)
    def __call__(self, *args : smt.ExprRef) -> list[smt.ExprRef]:
        """ Apply subst to a tuple of argument terms """
        if len(args) != len(self.dom):
            raise ValueError(f"Expected {len(self.dom)} arguments, got {len(args)}")
        return [smt.substitute(t, *zip(self.dom, args)) for t in self.ts]
    def __matmul__(self, other):
        """Composition plugs other's terms into self's holes"""
        assert isinstance(other, Subst)
        assert len(self.dom) == len(other.cod) and all(v.sort() == w.sort() for v, w in zip(self.dom, other.cod))
        return Subst(other.dom, self.cod, [smt.substitute(t, *zip(self.dom, other.ts)) for t in self.ts])
    @classmethod
    def id_(cls, xs : list[smt.ExprRef]):
        """ identity map does nothing"""
        return Subst(xs, xs, xs)
    def __mul__(self, other : "Subst"):
        # a monoidal product of substitutions. Doing them in parallel.
        assert isinstance(other, Subst)
        def freshen(xs):
            return [smt.FreshConst(x.sort(),prefix=x.decl().name()) for x in xs]
        odom = freshen(other.dom)
        ocod = freshen(other.cod)
        ots = [smt.substitute(t, *zip(other.dom, odom)) for t in other.ts]
        return Subst(self.dom + odom, self.cod + ocod, self.ts + ots)
    def __eq__(self, other) -> smt.BoolRef:
        """The proposition that two substitutions/morphisms are equal. To be assumed or proven"""
        assert len(self.dom) == len(other.dom) and len(self.cod) == len(other.cod)
        assert all(v.sort() == w.sort() for v, w in zip(self.dom, other.dom))
        assert all(v.sort() == w.sort() for v, w in zip(self.cod, other.cod))
        ots = [smt.substitute(t, *zip(other.dom, self.dom)) for t in other.ts] # rename to common variables self.dom
        return smt.ForAll(self.dom, smt.And([t1 == t2 for (t1,t2) in zip(self.ts, ots)]))


x,y,z = smt.Ints('x y z')
Subst.id_([x]) @ Subst.id_([y])


Subst(dom=[y], cod=[x], ts=[y])
Subst([x], [y,z], [x + 1, x - 2]) @ Subst([x], [y], [x * 7])

Subst(dom=[x], cod=[y, z], ts=[x*7 + 1, x*7 - 2])

Subst.id_([x]) * Subst.id_([y])
Subst(dom=[x, y!7], cod=[x, y!8], ts=[x, y!7])
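
Applying a substitution plugs argument terms into the domain variable positions. A quick check of __call__ from above (f here is just an ad hoc example):

f = Subst([x], [y, z], [x + 1, x * 2])
f(smt.IntVal(3))   # [3 + 1, 3*2]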

A sequence of ground terms can be lifted into a subst coming from the empty context. This is a standard categorical trick of pointing out a particular thing by using a map from some unit object.

def lift_ground(*ts) -> Subst:
    return Subst([], [smt.FreshConst(t.sort()) for t in ts], list(ts))
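
For example (the fresh codomain variable names will vary from run to run):

lift_ground(smt.IntVal(3), smt.RealVal(0.5))   # Subst(dom=[], cod=[<fresh Int>, <fresh Real>], ts=[3, 1/2])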

A function symbol can be seen as the substitution that would apply it.

def lift_funcdecl(f : smt.FuncDeclRef):
    args = [smt.FreshConst(f.domain(n)) for n in range(f.arity())]
    out = [smt.FreshConst(f.range())]
    return Subst(args, out, [f(*args)])

foo = smt.Function("foo", smt.IntSort(), smt.RealSort(), smt.BoolSort())
lift_funcdecl(foo)
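
Composing a lifted function symbol with lifted ground arguments applies the function. A small sketch using the foo declared above (fresh names elided):

lift_funcdecl(foo) @ lift_ground(smt.IntVal(1), smt.RealVal(2))   # Subst(dom=[], cod=[<fresh Bool>], ts=[foo(1, 2)])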

There is an opposite category of substitutions that, instead of applying the ts to args, narrows the variables in a predicate P(x) to the ts. Considering the predicate as a morphism from the vars to [Bool], this is reverse composition.

P can also be considered as a Subst to Bool.

def subst(s : Subst, P : smt.ExprRef) -> smt.ExprRef: 
    return smt.substitute(P, *zip(s.cod, s.ts))

def lift_pred(vs : list[smt.ExprRef], P : smt.ExprRef) -> Subst:
    """
    Lift a predicate P over variables vs to a substitution.
    """
    return Subst(vs, [smt.FreshConst(P.sort())], [P])
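
For example, narrowing the predicate x > 0 along a substitution, and lifting it (a small check of the two definitions above):

P = x > 0
s = Subst([y], [x], [y + 1])
subst(s, P)        # y + 1 > 0
lift_pred([x], P)  # Subst(dom=[x], cod=[<fresh Bool>], ts=[x > 0])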

What is a unification problem in these terms?

It is useful to extend the notion of a single equation to a multiequation. Then what we have is two substitutions that need to be made equal.

          - S >  
k - H > m        n
          - T > 

For some reason the literature seems to describe unification as a coequalizer. I don’t understand why. Maybe I’m totally wrong. Or perhaps they are taking the “narrowing” interpretation of substitutions rather than the “applying” interpretation.

See Goguen, What is Unification? https://www.sciencedirect.com/science/article/abs/pii/B9780120463701500127?via%3Dihub and https://www.cs.bu.edu/fac/snyder/publications/UnifChapter.pdf section 3.3.3.

def unify(s : Subst, t : Subst):
    assert len(s.dom) == len(t.dom) and all(v.sort() == w.sort() for v, w in zip(s.dom, t.dom))
    assert len(s.cod) == len(t.cod) and all(v.sort() == w.sort() for v, w in zip(s.cod, t.cod))
    todo = list(zip(s.ts, [smt.substitute(e, *zip(t.dom, s.dom)) for e in t.ts]))
    subst = Subst.id_(s.dom) # identity substitution to start
    while todo:
        (lhs, rhs) = todo.pop()
        if any(lhs.eq(v) for v in s.dom): # lhs is a variable
            newsubst = ...
            subst = subst @ newsubst
        # yada. Not sure it's clarifying to write this out in this way
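
To at least state the problem concretely: a unifier of two parallel substitutions S and T is a Subst u that makes the composites equal. A hand-built example (not produced by the stub above):

S = Subst([x, y], [z], [x + 1])
T = Subst([x, y], [z], [y])
u = Subst([x], [x, y], [x, x + 1])   # sends y to x + 1
kd.prove((S @ u) == (T @ u))         # |- ForAll(x, And(x + 1 == x + 1))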
    

Equational axioms can be expressed as commuting squares of certain arrows. https://ncatlab.org/nlab/show/Lawvere+theory https://en.wikipedia.org/wiki/Lawvere_theory

Every object is a product of a distinguished object: yes. Homomorphisms are functors in this context. This is a nice, tight formulation of homomorphism, although I find the more expanded definition much more intuitive. https://en.wikipedia.org/wiki/Homomorphism

add  = Subst([x,y], [z], [x + y])
swap = Subst([x,y], [y,x], [y,x])
kd.prove(add == add @ swap)

⊦ForAll([x, y], And(x + y == y + x))
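
Associativity of + can be stated the same way, using the parallel product to pad out the extra argument (a further sketch with the add defined above):

addl = add @ (add * Subst.id_([z]))   # (x + y) + z
addr = add @ (Subst.id_([x]) * add)   # x + (y + z)
kd.prove(addl == addr)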

Telescopes

An interesting thing to do is to start enriching the notion of object / context / the things substitutions go from and to.

One formulation of telescopes https://ncatlab.org/nlab/show/type+telescope for a refinement-type looking thing is to pack every variable with a boolean expression describing what precondition / subset the substitution is coming from. These are partial substitutions in some sense.

The meaning of a telescope can be somewhat explained by showing how to “forall” a predicate in that context using TForAll. Because of the iterated construction, the earlier variables are in scope for the later variables.

from kdrag.all import *
type Tele = list[tuple[smt.ExprRef, smt.BoolRef]]
def TForAll(vs : Tele, P):
    for v,pre in reversed(vs):
        P = smt.ForAll([v], smt.Implies(pre, P))
    return P

x,y,z = smt.Ints('x y z')
TForAll([(x, x > 0), (y, y > 0)], x + y > 0)

∀x : x > 0 ⇒ (∀y : y > 0 ⇒ x + y > 0)

Telescope mappings are a generalization of substitutions. We can use them to organize a theorem proving process involving pre and post conditions. They represent a bundling of the previous Subst and theorems/proofs about pre and post conditions of the terms in that subst.

from dataclasses import dataclass
@dataclass
class TeleMap():
    dom : Tele
    cod : Tele
    ts : list[smt.ExprRef]  # len(cod) expressions using variables in dom. When the dom preconditions are true, the cod conditions should hold of ts.
    def __post_init__(self):
        assert len(self.ts) == len(self.cod)
        subst = [(v, t) for (v, _), t in zip(self.cod, self.ts)]
        self.pfs = []
        for (v, cond) in self.cod:
            self.pfs.append(kd.prove(TForAll(self.dom, smt.substitute(cond, *subst))))
    def __call__(self, *args):
        assert len(args) == len(self.dom)
        subst = [(v, a) for (v, _), a in zip(self.dom, args)]
        #for pf in self.pfs:
        for (_, pre) in self.dom:
            kd.prove(smt.substitute(pre, *subst))
        return [smt.substitute(t, *subst) for t in self.ts]
    @classmethod
    def id_(cls, xs : Tele):
        return cls(xs, xs, [v for v, _ in xs])
    def __matmul__(self, other):
        assert len(self.dom) == len(other.cod)
        subst = [(v, w) for (v, _), (w, _) in zip(self.dom, other.cod)]
        vs = [v for v,_ in self.dom]
        for (v, pre), (w, post) in zip(self.dom, other.cod):
            assert v.sort() == w.sort(), f"Sort mismatch: {v.sort()} != {w.sort()}"
            # allows subset composition rather than on the nose composition.
            kd.prove(TForAll(other.cod, smt.substitute(pre, *subst))) 
        return TeleMap(other.dom, self.cod, [smt.substitute(t, *zip(vs, other.ts)) for t in self.ts])

TeleMap([(x, x > -1)],[(y, y > 0)], [x + 1])  @ TeleMap([(x, x > 0)], [(y, y > 0)], [x + 1])
TeleMap(dom=[(x, x > 0)], cod=[(y, y > 0)], ts=[x + 1 + 1])
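
The identity telescope map generates only trivial proof obligations:

TeleMap.id_([(x, x > 0)])   # TeleMap(dom=[(x, x > 0)], cod=[(x, x > 0)], ts=[x])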

A well formed telescope mapping represents a theorem about the mappings in ts: if all the variables satisfy the preconditions in the self.dom telescope, the terms self.ts should obey the postconditions in self.cod.

TeleMap([(x, x > 0), (z, z > x)], [(y, y > 0), (z, z > 1)], [x + z, z]).pfs
[|- ForAll(x,
        Implies(x > 0, ForAll(z, Implies(z > x, x + z > 0)))),
 |- ForAll(x, Implies(x > 0, ForAll(z, Implies(z > x, z > 1))))]
TeleMap([(x, x > 0)], [(y, y > 0)], [x + 1])(smt.IntVal(7))
[7 + 1]
# fails to meet precondition
TeleMap([(x, x > 0)], [(y, y > 0)], [x + 1])(smt.IntVal(-2))
---------------------------------------------------------------------------

LemmaError                                Traceback (most recent call last)

Cell In[45], line 1
----> 1 TeleMap([(x, x > 0)], [(y, y > 0)], [x + 1])(smt.IntVal(-2))


Cell In[43], line 18, in TeleMap.__call__(self, *args)
     16 #for pf in self.pfs:
     17 for (_, pre) in self.dom:
---> 18     kd.prove(smt.substitute(pre, *subst))
     19 return [smt.substitute(t, *subst) for t in self.ts]


File ~/Documents/python/knuckledragger/kdrag/tactics.py:216, in prove(thm, by, admit, timeout, dump, solver, unfold)
    212         raise Exception(
    213             "Worked with quantifier stripped. Something is going awry", pf
    214         )
    215     else:
--> 216         raise e
    217 except Exception as e:
    218     raise e


File ~/Documents/python/knuckledragger/kdrag/tactics.py:194, in prove(thm, by, admit, timeout, dump, solver, unfold)
    192         by.append(kd.kernel.prove(thm == thm1, by=trace, timeout=timeout))  # type: ignore
    193 try:
--> 194     pf = kd.kernel.prove(
    195         thm, by, timeout=timeout, dump=dump, solver=solver, admit=admit
    196     )
    197     kdrag.config.perf_event("prove", thm, time.perf_counter() - start_time)
    198     return pf


File ~/Documents/python/knuckledragger/kdrag/kernel.py:117, in prove(thm, by, admit, timeout, dump, solver)
    115 if res != smt.unsat:
    116     if res == smt.sat:
--> 117         raise LemmaError(thm, "Countermodel", s.model())
    118     raise LemmaError("prove", thm, res)
    119 else:


LemmaError: (-2 > 0, 'Countermodel', [])
x = smt.Bool("x")
TRUE : Tele = [(x, x)]
TRUE2 = TeleMap([], TRUE, [smt.BoolVal(True)])
Bool = [(x, smt.BoolVal(True))]
FALSE : Tele = [(x, smt.Not(x))]
smt.Int("y")
ZERO = TeleMap([(y, y == 0)], TRUE, [y == 0])
POS = TeleMap([(y, y > 0)], TRUE, [y > -1])
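
The proof obligations recorded in pfs are the corresponding theorems, e.g. (output shape approximate):

POS.pfs   # [|- ForAll(y, Implies(y > 0, y > -1))]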

Bits and Bobbles

While I normalized to type Tele = list[tuple[smt.ExprRef, smt.BoolRef]], there is also a reasonable version of Tele that holds smt.Lambda([x], pre) in that second position. This makes it look more like typical dependent types. For example, I can define Pos = smt.Lambda([x], x > 0) and have a telescope [(x, Pos)] instead of [(x, Pos(x))]. The forms are basically interconvertible, but they make the mind go different places.

This is approximately the subset model of dependent type theory?

One could take all variables to be in a DeclareSort("PreTerm") sort and then the subsets really do take care of the things the z3 simple sort system is doing for us. This is more flexible, but pushes more into runtime / solve time / deeper encoding depth.

SMTLIB is kind of a functional programming language. Refinement/liquid/subset typing applied to it is perfectly reasonable and pleasingly homoiconic. Maybe using spacer to discharge them could be kind of clever.


Telescope unification?

The equalizer in a Lawvere theory is E-unification.

coalgebra. a -> f a

operads are term rewriting equations with linearity on the rules

Hmm. I’m using smt.substitute, which is simultaneous substitution. Since my semantics is nested foralls, should I be using iterated substitution?

GATs

EATs https://ncatlab.org/nlab/show/essentially+algebraic+theory

https://ncatlab.org/nlab/show/substitution

https://ncatlab.org/nlab/show/categorical+semantics+of+dependent+type+theory It really seems like I have defined the opposite category to the one they want.

Subobjects are modelled as mappings into an object that are injective.

https://people.mpi-sws.org/~dreyer/courses/catlogic/jacobs.pdf

https://ncatlab.org/nlab/files/HofmannExtensionalIntensionalTypeTheory.pdf Amongst the many nice things in this thesis is a good description of telescopes on p. 26, section 2.2.2: Gamma |- f => Delta

https://proofassistants.stackexchange.com/questions/755/whats-the-relationship-between-refinement-types-and-dependent-types

Hmm. Maybe explicit cod was a mistake. I dunno.

I am puzzled about whether there is really that much utility of the telescope form forall x, p(x) => forall y, q(x,y) => ... vs just the smashed together version forall x y ..., p(x) /\ q(x,y) /\ .... The latter is more reminiscent of Isabelle Pure.

https://math.andrej.com/2012/09/28/substitution-is-pullback/

https://www.cs.bu.edu/fac/snyder/publications/UnifChapter.pdf

Subset Model

https://www.philipzucker.com/frozenset_dtt/ showed how to use finite sets to interpret some dependent types

from typing import Callable
from frozendict import frozendict
import itertools
type Family = Callable[[object], Type]
type Type = frozenset

Void = frozenset({})
Unit = frozenset({()})
Bool = frozenset({True, False})
def Fin(n : int) -> Type:
    return frozenset(range(n))
def Vec(A : Type, n : int) -> Type:
    return frozenset(itertools.product(A, repeat=n))

def is_type(A: Type) -> bool: # |- A type
    return isinstance(A, frozenset)
def has_type(t: object, A: Type) -> bool: # |- t : A
    return t in A
def eq_type(A: Type, B: Type) -> bool: # |- A = B type
    return A == B
def def_eq(x : object, y: object, A : Type) -> bool: # |- x = y : A
    return x == y and has_type(x, A) and has_type(y, A)

def Sigma(A: Type, B: Family) -> Type:
    return frozenset({(a, b) for a in A for b in B(a)})
def Pair(A : Type, B: Type) -> Type:
    return Sigma(A, lambda x: B)

def Pi(A : Type, B : Family) -> Type:
    Alist = list(A)
    return frozenset(frozendict({k:v for k,v in zip(Alist, bvs)}) for bvs in itertools.product(*[B(a) for a in Alist]))
def Arr(A : Type, B: Type) -> Type:
    return Pi(A, lambda x: B)

def Sum(A : Type, B: Type) -> Type:
    return frozenset({("inl", a) for a in A} | {("inr", b) for b in B})

def Id(A : Type, x : object, y : object) -> Type:
    return frozenset({"refl"}) if x == y else frozenset()
def U(n : int, l : int) -> Type:
    if l > 0:
        u = U(n, l-1)
        return u | frozenset([u]) # Cumulative
    elif n > 0:
        u = U(n-1, 0)
        # TODO also the Pi and Sigma
        return u | frozenset([Arr(A,B) for A in u for B in u]) | frozenset([Pair(A,B) for A in u for B in u]) | frozenset([Fin(n)])
    else:
        return frozenset([Unit, Bool, Void])
def Quot(A : Type, R) -> Type:
    return frozenset(frozenset({y for y in A if R(x,y)}) for x in A)
def SubSet(A :  Type, P : Family) -> Type: # very much like Sigma
    return frozenset({(x, ()) for x in A if P(x)}) # Note: because of python truthiness, this also accepts ordinary bool-valued predicates.
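
A small usage check of this finite-set model, treating vectors over Bool as a family indexed by Fin(3):

VecB = Sigma(Fin(3), lambda n: Vec(Bool, n))
has_type((2, (True, False)), VecB)   # True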

We can also make a subset model as mentioned here https://ncatlab.org/nlab/files/HofmannExtensionalIntensionalTypeTheory.pdf

Types are interpreted as a pair of a frozenset and a callable function representing the subset of that frozenset.

There are now two equalities to talk about: on-the-nose equality == and Kleene equality https://en.wikipedia.org/wiki/Kleene_equality

from typing import Callable
from frozendict import frozendict
import itertools
type Type = tuple[frozenset, Callable[[object], bool]]
type Family = tuple[Callable[[object], frozenset], Callable[[object], Callable[[object], bool]]]
# type Family = Callable[[object], Type] # A family of types, indexed by an object
TRUE = lambda x: True
FALSE = lambda x: False

Void = (frozenset({}), TRUE)
Unit = (frozenset({()}), TRUE)
Bool = (frozenset({True, False}), TRUE)
def Fin(n : int) -> Type:
    return (frozenset(range(n)), TRUE)
def Sigma(A: Type, B: Family) -> Type:
    return frozenset({(a, b) for a in A[0] for b in B[0](a)}), lambda ab: A[1](ab[0]) and B[1](ab[0])(ab[1])
def def_eq(x, y, A):
    return x == y
def ext_eq(x,y,A): # kleene equality https://en.wikipedia.org/wiki/Kleene_equality
    return (not A[1](x) and not A[1](y)) or x == y #??? not so sure
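
A tiny illustration with made-up names: two elements that both fall outside the subset count as Kleene-equal even though they are not equal on the nose:

Positive = (frozenset(range(-3, 4)), lambda n: n > 0)
def_eq(0, -1, Positive)   # False
ext_eq(0, -1, Positive)   # True, since neither is in the subset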

z3 subst category

https://ncatlab.org/nlab/show/substitution#CategoricalSemantics

A categorical presentation of the unification algorithm. We break the Lawvere theory down into par and comp and do it piece by piece somehow.

I guess pullback is kind of like unifying terms from two different unification var sets

https://math.andrej.com/2012/09/28/substitution-is-pullback/ predicates. https://en.wikipedia.org/wiki/Pullback

Substitution equationally: we work with the entire system…

-- eq is equalizer morphism
eq(f,f) = id(dom(f))  -- id rule
eq(f,g) = eq(g,f)  -- kind of orient rule but all at once.
eq(par(f,g),par(h,k)) = par(eq(f,h),eq(g,k))
eq(comp(f,g), comp(h,k)) =?
 -- unify  f,h first, then unify 
 eq(comp(f,g), h) = eq(f . ??, h) something?


eq(id,f) =? 

https://link.springer.com/chapter/10.1007/3-540-17162-2_139 A categorical unification algorithm, Rydeheard and Burstall. Hmm. Coequalizer?

Hmm. Yes, the variables in the terms are associated with the CODOMAIN of the substitution. The domain of the substitution is basically a mere indexing and almost doesn’t have a sense of being a variable.

To be comparing vars in the codomain like unification does, it should be a coequalizer.

{x -> f(x’, y’), y -> g(x’, y’)} [f(v0,v1), g(v0,v1)]

[f(p(q(a)))] is a concrete ground term. The codomain is implicit. It could be 0 vars (?) The domain is 1.

A signature can be expressed as noting atomic elements [f(v0,v1)] [g(v0)]. [mul(v0,v1)] [inv(v0)] []

A particular open predicate is [even(succ(succ(v0)))]. A multi-predicate is interpreted as conjunction?

Raising and lowering unnecessary variables. Projecting out variables as exists and forall. Duplicating variables ~ eq.

[v0, v0] expresses {x -> x’, y -> x’}, a merging substitution. There is no duplicating substitution {x -> x’, x -> y’}.

vBool -> F(x) . vInt -> succ(y) = vBool -> F(succ(y)): post composition is substitution into the predicate. vBool -> F(x) . vInt -> add(z,y) = vBool -> F(add(z,y)): this makes more variables.

f(x,y) = x is an equation saying that vInt -> f(x,y) is the same as raise(vInt -> x, vInt).


https://www.cl.cam.ac.uk/~amp12/papers/catl/catl.pdf Categorical Logic, Pitts

https://www.youtube.com/watch?v=uQp-Pi5jSNk quantifiers as adjoints

from z3 import *

Ctx = list[SortRef]


class Morph():
    dom: list[SortRef] # not so implicit
    cod: list[SortRef] # implicit in subst (?).
    subst : list[ExprRef]
    def __init__(self, dom, cod, subst):
        assert [t.sort() for t in subst] == cod
        # assert all set(get_vars(t) for t in subst) in 
        self.dom = dom
        self.cod = cod
        self.subst = subst
    def __matmul__(self, other):
        assert self.dom == other.cod
        return Morph(other.dom, self.cod, [substitute_vars(t, *other.subst) for t in self.subst])


def id(sorts): # take a sort list, since Var needs a sort
    return [Var(i, s) for i, s in enumerate(sorts)]
def comp(fs, gs):
    return [substitute_vars(f, *gs) for f in fs]
def par(fs, gs):
    # TODO: shift the de Bruijn indices in gs past the variables used by fs before concatenating
    return [f for f in fs] + [g for g in gs]
# e.g. for a group signature over some sort T with mul : T × T -> T and a constant e
mullawv = [mul(Var(0, T), Var(1, T))]
elawv = [e]
class Predicates():
    vs: Ctx #list[SortRef]
    expr: BoolRef
    #exprs: set[BoolRef] equivalence exprs?

    def proj(self) -> Ctx:
        # returns an object in the substitution category
        return self.vs #(?)

# category is preorder
class Morph():
    dom : Predicates
    cod : Predicates
    subst : Subst
    def __init__(self, dom, cod, subst):
        # The variables aren't shared? Hmmmmm.
        assert dom.vs == subst.dom
        assert cod.vs == subst.cod

        assert prove(ForAll(cod.vs, Implies(substitute_vars(dom.expr, *subst.subst), cod.expr)))

        #assert prove(ForAll(dom.vs, Implies(dom.expr, cod.expr)))
        self.dom = dom
        self.cod = cod
        self.subst = subst
    # quantifiers
    def add(self, vsort): 
        # add an unused var along for the ride? Antiprojecting.
        # "weaken". "lift" "raise"
        ...
    def leftadjoint(self):
        # x.add(v).leftadjoint() ~ x
        ...

    def rightadjoint(self):
        # x.rightadjoint().add(v) ~ x
        ...

Typing as fibers. gamma |- A is like an (entangled) product of gamma and A

proj(gamma - A) = gamma

The functoriality of substitution to the predicate category is saying that if you prove an implication with variables in it, you can substitute in anything you want and still have a proof. No. Predicates have to be over the same context to be compared… gamma |- A -> B. No wait. If we include a subst we have a way to compare the two variable sets…

There is a category over a single ctx. There is also a category of ctx maps.

Apropos partial functions: the thing that Nate and I talked about a bit was modeling these as spans, So a partial function f:A-> B would be a pair of regular functions g:X->A, h:X->B, where h is monic, and f(a) = b would translate to Ex.g(x) = a and h(x) = b. If you make h epic, that corresponds to the function being total, and if you relax the constraint that h is monic, you get what feel like multiset valued functions. For encoding in a theorem prover, I wonder if it would be smoother to use cospans, so g:A->X, h:B->X, with h monic. Then f(a) = b could translate to g(a) = f(b), and you don’t need the quantifier. X could be basically Maybe B.

From lunch: sorry, coequalizer, not equalizer
About the substitution thing: the inclusion function, from the set of definable sets of individuals (represented by one-variable formulas/predicates) to the set of definable sets of pairs of individuals (represented by two-variable formulas) is actually the result of a substitution, if you look at it a certain way. Suppose I have a two-argument function, f(x,y) and a one variable formula F(x). I can make a two variable formula by substitution: F(f(x,y)). If my function is projection of the left argument, so that f(x,y) = x, then what my substitution is doing is, in a more explicit way, turning F from a one variable formula into a slightly degenerate case of a two variable formula.

There's a contravariant functor thing going on here: f : A -> B induces a functor from the fiber (in this case, a Boolean algebra of definable sets or something) over B to the fiber over A.


Yea, coequalizer does seem right now
What is the fiber? There is a special Bool object maybe and  strangely the morphisms coming out of bool represent predicates and the target object is the vars in the predicate expression?
Or is the boolean algebra a category connected to our substitution category by a functor, i.e. a model of our signature?

So, the boolean algebra is the fiber. The objects in there are equivalence classes of formulas, and the arrows are the boolean algebra ordering, which corresponds to implication.

The idea is that there's a total category whose objects are formulas in a typing context. So, like a "two variable formula" is a formula whose variables are a subset of x,y, in a context "x: individual, y: individual". The contexts are the base category, and the functor that erases formulas and just leaves the context is the fibration. The fibers are the preimage categories of a given context plus its identity map - in the FOL case, these turn out to be Boolean algebras. (edited) 

The contexts are the objects in the base category of substitutions or each context is an entire base category? (edited) 


They're objects in a base category. Base category is not quite a category of substitutions, it's more like a category where the maps are function symbols. So like, a two variable function f induces a map from the context "x: individual, y: individual" to "x:individual". But a map in the base category like this creates a map (going in the other direction) between the fibers, which is essentially the result of substituting f(x,y) for x in each of the formulas-in-context in the fiber, thereby changing the context that it lives in.


What about if you compose two maps? Isn’t it then not a function symbol and something more structured, more like a general substitution? It's generated by the function symbols?

Yeah, that's right.
I'd think of it as a term, so like f(g(x,z),y) might be a map from the context x,y,z to the context x.



Something I was realizing last night is substitution goes the other way. x,y,z is the codomain, despite how weird that feels
{x’ -> f(g(x,z),y)}
makes that feel better. Those codomain vars are on the right now

All is well with the world.

Are the upper objects full equivalence classes? Seems like a burden. Why can’t the upper objects be individual predicate expressions, what is the set getting us?

I think they can. It just wouldn't be a boolean algebra anymore.

a = b = c, why can’t an object be {a,b}

Refinement sorts

I had notes somewhere about prolog using CLP.

https://arxiv.org/pdf/2010.07763 tutorial

https://noamz.org/oplss16/refinements-notes.pdf

https://proofassistants.stackexchange.com/questions/755/whats-the-relationship-between-refinement-types-and-dependent-types hmm. Pierre-Marie suggests that an insufficiently restricted P in {x | P} can effectively encode full dependent types. “realizability interpretation” in PRL.

Does this subsystem make me closer to Cody’s Boole?

smtlib is basically a simply typed programming language. Why not?

For using new logics in knuckledragger we can: deep embed in z3, shallow embed into z3, or make new judgement forms at the python metalevel that contain kd.Proof.

annotate will be an immediate call to check (?). So annotate will take in Gamma.

reflection could be an axiom schema to convert HasType to a kd.Proof. Interesting.

from dataclasses import dataclass
from kdrag.all import *
@dataclass
class Telescope():
    vs : list[smt.ExprRef]
    preds : list[smt.BoolRef | smt.QuantifierRef] # crucially can contain previous vars
    def __init__(self, *vspreds): # could just make the empty context?
        vs, preds = zip(*vspreds) if vspreds else ((), ())
        self.vs, self.preds = list(vs), list(preds)
    def __repr__(self):
        return ", ".join([f"{v} : {v.sort()} | {pred}" for v,pred in zip(self.vs,self.preds)])
    def entail(self):
        acc = smt.BoolVal(True)
    def extend(self, v, pred):
        # add a new variable to the context
        # should check that previous preds do not contain the new v.
        return Telescope(*zip(self.vs + [v], self.preds + [pred]))

# more linked list like. Easier to make immutable? Hmm. But lists are so much easier to work with.
class Telescope2():
    tail : "Telescope2"
    v : smt.ExprRef
    pred : smt.BoolRef



# as bottom up judgement, base case should take a kd.Proof
# This is actually kind of cool.
@dataclass
class EntailJudge():
    ctx: Telescope
    c : smt.BoolRef

@dataclass
class SubClass():
    ctx : Telescope
    v1 : smt.ExprRef
    typ1 : smt.BoolRef
    v2 : smt.ExprRef
    typ2 : smt.BoolRef
    # do we automatically do it in the smart constructor or not?
    def __init__(self): ...
    def __repr__(self):
        return f"{self.ctx} |- {{ {self.v1} | {self.typ1} }} <: {{ {self.v2} | {self.typ2} }}"

@dataclass
class HasType():
    ctx : Telescope
    t : smt.ExprRef
    typ : smt.BoolRef
    def __repr__(self):
        return f"{self.ctx} |- {self.t} : {{ {self.t.sort()} | {self.typ} }}"


x,y,z = smt.Ints("x y z")

Telescope((x, x > 0), (y, x > y), (z, z < y))
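
With the constructor cleaned up, extending a context also works:

Telescope((x, x > 0)).extend(y, x > y)   # x : Int | x > 0, y : Int | x > y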

class RefinedSort():
    sort : z3.SortRef
    pred : z3.ExprRef # not a boolref. a lambda {x : Sort | pred(x) } -> {y : sort2 | pred(x,y) }
    v : z3.ExprRef
    pred : z3.BoolRef # maybe involving pred
    ctx : list[z3.ExprRef] # y : ?  |- {x : sort | pred(x)}


from dataclasses import dataclass
from z3 import *
import z3
@dataclass
class RSort():
    sort : z3.SortRef
    pred : z3.BoolRef # self is referred to as var(0)
@dataclass
class Arr():
    dom: RSort
    cod: RSort # var(1) refers to var bound in dom.
    @property
    def sort(self):
        return ArraySort(self.dom.sort, self.cod.sort)


def entail(gamma, c):
    xs = [FreshConst(sort) for sort in gamma]
    s = Solver()
    s.add(Not(ForAll(xs, substitute_vars(c, *xs))))
    assert s.check() == unsat
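
A minimal sanity check of entail (my own throwaway example): x + 0 == x holds in a one-integer-variable context.

entail([IntSort()], Var(0, IntSort()) + 0 == Var(0, IntSort()))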


def subtyp(gamma, sort1, sort2):
    if isinstance(sort1, Arr) and isinstance(sort2, Arr):
        subtyp(gamma, sort2.dom, sort1.dom) # contravariant in the domain
        subtyp([sort2.dom.sort] + gamma, sort1.cod, sort2.cod)
    elif isinstance(sort1, RSort) and isinstance(sort2, RSort):
        assert sort1.sort == sort2.sort
        x = FreshConst(sort1.sort)
        f = ForAll([x], substitute_vars(Implies(sort1.pred, sort2.pred), x))
        entail(gamma, f) # entail asserts internally; raises if the implication does not hold
    else:
        raise Exception("Unknown type", sort1, sort2)


def synth(gamma, term):
    ...

def check(gamma, term, typ):
    assert term.sort() == typ.sort
    if is_select(term): # app
        ...
    elif is_quantifier(term) and term.is_lambda():
        assert isinstance(typ, Arr)
        x, body = open_binder(term)
        gamma[x] = typ.dom
        return check(gamma, body, typ.cod)
    elif term in gamma: # gotta find it
        s = gamma[term]