## Simplest usage: query a probability with no constraints at all.
inferP(target = P(a | h))
## A single inequality constraint: the probability of not-a given h
## is at least 0.2.
inferP(target = P(a | h),
       P(-a | h) >= 0.2)
## The probability of an "and" is never greater
## than the probabilities of the and-ed propositions:
inferP(
target = P(a & b | h),
P(a | h) == 0.3,
P(b | h) == 0.6
)
## Product rule: P(a & b | h) is fully determined
## by P(a | h) together with P(b | a & h).
inferP(target = P(a & b | h),
       P(a | h) == 0.3,
       P(b | a & h) == 0.2)
## Modus ponens: certainty of a and of the implication a > b
## entails certainty of b.
inferP(target = P(b | I),
       P(a | I) == 1,
       P(a > b | I) == 1)
## The cut rule of sequent calculus, expressed probabilistically.
inferP(target = P(X + Y | I & J),
       P(A & X | I) == 1,
       P(Y | A & J) == 1)
## Solution to the Monty Hall problem (see accompanying vignette):
## carN = "the car is behind door N", youN = "you chose door N",
## hostN = "the host opens door N" -- presumed from the naming; see vignette.
inferP(
target = P(car2 | you1 & host3 & I),
## The car is behind exactly one of the three doors
## (pairwise exclusive, jointly exhaustive):
P(car1 & car2 | I) == 0,
P(car1 & car3 | I) == 0,
P(car2 & car3 | I) == 0,
P(car1 + car2 + car3 | I) == 1,
## The host opens exactly one of the three doors:
P(host1 & host2 | I) == 0,
P(host1 & host3 | I) == 0,
P(host2 & host3 | I) == 0,
P(host1 + host2 + host3 | I) == 1,
## The host never opens the door you chose, nor a door hiding the car:
P(host1 | you1 & I) == 0,
P(host2 | car2 & I) == 0,
P(host3 | car3 & I) == 0,
## A priori, each door is equally likely to hide the car:
P(car1 | I) == P(car2 | I),
P(car2 | I) == P(car3 | I),
## Your choice carries no information about the car's location:
P(car1 | you1 & I) == P(car2 | you1 & I),
P(car2 | you1 & I) == P(car3 | you1 & I),
## If the car is behind your door, the host opens either
## remaining door with equal probability:
P(host2 | you1 & car1 & I) == P(host3 | you1 & car1 & I)
)
## Run the code above in your browser using DataLab