Compare commits


13 commits

5 changed files with 471 additions and 243 deletions

README.md

@@ -2,15 +2,15 @@
 Interested in seeing which satisfiable models from [arranstewart/magic](https://github.com/arranstewart/magic) have the variable sharing property?
-(1) Generate satisfiable matrix models using `magic`.
+(1) Generate satisfiable matrix models using `MaGIC`.
 - Use the `ugly` data format as the output
-- Keep in mind not all logic features in magic are supported, check out the [GitHub issue tracker](https://github.com/Brandon-Rozek/matmod/issues) to see upcoming features or make your own requests
 (2) Run our tool! It will first attempt to parse all the matrices in the output file and then check for the variable sharing property one-by-one.
 ```
-python3 parse_magic.py < examples/R6
+./vspursuer.py -i examples/R6
 ```
-If you face any troubles, feel free to reach out. This tool also has capabilities to generate satisfiable models given a specification (see: R.py), however, it is much slower than magic so you're better off using that.
+If you face any troubles, feel free to reach out. This tool is also able to generate satisfiable models given a specification (see: R.py); however, it is much slower than MaGIC, so you're better off using that.
+Check out the [GitHub issue tracker](https://github.com/Brandon-Rozek/matmod/issues) to see upcoming features or make your own requests.
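A minimal sketch of driving the same pipeline from Python instead of the CLI, along the lines of what the new vspursuer.py does (this assumes you run it from the repository root and that examples/R6 exists, as in the README command above):

```
# Sketch: programmatic equivalent of `./vspursuer.py -i examples/R6`,
# using the functions introduced in this changeset
# (parse_magic.SourceFile, parse_magic.parse_matrices, vsp.has_vsp).
from parse_magic import SourceFile, parse_matrices
from vsp import has_vsp

with open("examples/R6", "r") as data_file:
    solutions = parse_matrices(SourceFile(data_file))

for model, interpretation in solutions:
    print(has_vsp(model, interpretation))
```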

model.py

@@ -219,15 +219,18 @@ def satisfiable(logic: Logic, model: Model, interpretation: Dict[Operation, Mode
-def model_closure(initial_set: Set[ModelValue], mfunctions: Set[ModelFunction]):
+def model_closure(initial_set: Set[ModelValue], mfunctions: Set[ModelFunction], top: Optional[ModelValue], bottom: Optional[ModelValue]) -> Set[ModelValue]:
    """
    Given an initial set of model values and a set of model functions,
    compute the complete set of model values that are closed
    under the operations.
+    If top or bottom is encountered, then we end the saturation procedure early.
    """
    closure_set: Set[ModelValue] = initial_set
    last_new: Set[ModelValue] = initial_set
    changed: bool = True
+    topbottom_found = False

    while changed:
        changed = False
@@ -251,6 +254,18 @@ def model_closure(initial_set: Set[ModelValue], mfunctions: Set[ModelFunction]):
                if element not in closure_set:
                    new_elements.add(element)

+                # Optimization: Break out of computation
+                # early when top or bottom element is found
+                if top is not None and element == top:
+                    topbottom_found = True
+                if bottom is not None and element == bottom:
+                    topbottom_found = True
+                if topbottom_found:
+                    break
+
+            if topbottom_found:
+                break
+
            # We don't need to compute the arguments
            # thanks to the cache, so move onto the
            # next function.
@@ -274,8 +289,27 @@ def model_closure(initial_set: Set[ModelValue], mfunctions: Set[ModelFunction]):
                if element not in closure_set:
                    new_elements.add(element)

+                # Optimization: Break out of computation
+                # early when top or bottom element is found
+                if top is not None and element == top:
+                    topbottom_found = True
+                if bottom is not None and element == bottom:
+                    topbottom_found = True
+                if topbottom_found:
+                    break
+
+            if topbottom_found:
+                break
+
+        if topbottom_found:
+            break
+
        closure_set.update(new_elements)
        changed = len(new_elements) > 0
        last_new = new_elements

+        if topbottom_found:
+            break
+
    return closure_set
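The reason model_closure now takes top and bottom is visible further down in vsp.py: any closure that contains either element gets discarded by has_vsp, so there is no point finishing the saturation once one of them appears. A standalone sketch of that pattern, with made-up names and a toy step function instead of the repository's ModelFunction machinery:

```
# Illustrative sketch only: a fixpoint closure loop with an optional
# "sentinel" value that ends saturation early, mirroring the top/bottom
# shortcut added to model_closure in this changeset.
from typing import Callable, Optional, Set

def closure_with_sentinel(seed: Set[int],
                          step: Callable[[int, int], int],
                          sentinel: Optional[int] = None) -> Set[int]:
    closure = set(seed)
    frontier = set(seed)
    while frontier:
        new_elements = set()
        for x in closure:
            for y in frontier:
                z = step(x, y)
                if z not in closure:
                    new_elements.add(z)
                if sentinel is not None and z == sentinel:
                    # Sentinel reached: the caller rejects this closure
                    # anyway, so stop saturating early.
                    return closure | new_elements
        closure |= new_elements
        frontier = new_elements
    return closure

# Example: close {2} under multiplication mod 10, stopping if 6 shows up.
print(closure_with_sentinel({2}, lambda a, b: (a * b) % 10, sentinel=6))
```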

parse_magic.py

@@ -3,8 +3,7 @@ Parses the Magic Ugly Data File Format
Assumes the base logic is R with no extra connectives
"""
-import argparse
-import sys
+import re
from typing import TextIO, List, Optional, Tuple, Set, Dict

from model import Model, ModelValue, ModelFunction
@@ -13,169 +12,328 @@ from logic import (
    Conjunction,
    Negation,
    Necessitation,
-    Disjunction
+    Disjunction,
+    Operation
)
-from vsp import has_vsp

class SourceFile:
    def __init__(self, fileobj: TextIO):
        self.fileobj = fileobj
        self.current_line = 0
+        self.reststr = ""

-    def __next__(self):
-        contents = next(self.fileobj)
+    def next_line(self):
+        """
+        Grabs the next line.
+        If reststr is populated return that, otherwise
+        consume generator
+        """
+        if self.reststr != "":
+            reststr = self.reststr
+            self.reststr = ""
+            return reststr
+
+        contents = next(self.fileobj).strip()
        self.current_line += 1
        return contents

+    def __next__(self):
+        """
+        Grabs the next word token from the stream
+        """
+        if self.reststr == "":
+            self.reststr = next(self.fileobj).strip()
+            self.current_line += 1
+        tokens = self.reststr.split(" ")
+        next_token = tokens[0]
+        self.reststr = " ".join(tokens[1:])
+        return next_token
+
class UglyHeader:
-    def __init__(self, negation: bool, necessitation: bool):
+    def __init__(self, negation: bool, necessitation: bool, custom_model_functions: List[Tuple[int, str]]):
+        # Booleans describing the logical fragment
        self.negation = negation
        self.necessitation = necessitation
+        # List of custom model functions described as
+        # a sequence of (adicity, symbol) pairs
+        self.custom_model_functions = custom_model_functions

class ModelBuilder:
    def __init__(self):
        self.size : int = 0
        self.carrier_set : Set[ModelValue] = set()
-        self.num_negation: int = 0
        self.mnegation: Optional[ModelFunction] = None
-        self.num_order: int = 0
        self.mconjunction: Optional[ModelFunction] = None
        self.mdisjunction: Optional[ModelFunction] = None
-        self.num_designated: int = 0
        self.designated_values: Set[ModelValue] = set()
-        self.num_implication: int = 0
        self.mimplication: Optional[ModelFunction] = None
-        self.num_necessitation: int = 0
        self.mnecessitation: Optional[ModelFunction] = None
+        # Map symbol to model function
+        self.custom_model_functions: Dict[str, ModelFunction] = {}

+class Stage:
+    def __init__(self, name: str):
+        self.name = name
+        self.next: Optional['Stage'] = None
+        self.previous: Optional['Stage'] = None
+        # This corresponds to a portion of the model name in MaGIC
+        self.num = 0
+    def increment(self):
+        self.num += 1
+    def reset(self):
+        self.num = 0
+    def __str__(self):
+        return self.name
+
+class Stages:
+    def __init__(self):
+        end_stage = Stage("end")
+        self.stages: Dict[str, Stage] = {"end": end_stage}
+        self.last_added_stage: Stage = end_stage
+        self.first_stage: Optional[Stage] = None
+    def add(self, name: str):
+        stage = Stage(name)
+        stage.next = stage
+        stage.previous = self.last_added_stage
+        # End stage is a sink so don't
+        # mark any stages as next
+        if self.last_added_stage.name != "end":
+            self.last_added_stage.next = stage
+        else:
+            # If this is triggered, then this is the first
+            # stage added
+            self.first_stage = stage
+        self.stages[name] = stage
+        self.last_added_stage = stage
+    def reset_after(self, name):
+        """
+        Resets the stage counters after a given stage.
+        This is to accurately reflect the name of the
+        model within MaGIC.
+        """
+        stage = self.stages[name]
+        while stage.name != "process_model":
+            stage.reset()
+            stage = stage.next
+    def get(self, name):
+        return self.stages[name]
+    def name(self):
+        result = ""
+        stage = self.first_stage
+        if stage is None:
+            return ""
+        result = f"{stage.num}"
+        if stage.next == "process_model":
+            return result
+        stage = stage.next
+        while stage is not None:
+            result += f".{stage.num}"
+            if stage.next.name != "process_model":
+                stage = stage.next
+            else:
+                stage = None
+        return result
+
+def derive_stages(header: UglyHeader) -> Stages:
+    stages = Stages()
+    stages.add("size")
+    if header.negation:
+        stages.add("negation")
+    stages.add("order")
+    stages.add("designated")
+    stages.add("implication")
+    if header.necessitation:
+        stages.add("necessitation")
+    for (adicity, symbol) in header.custom_model_functions:
+        stages.add(f"custom--{adicity}--{symbol}")
+    stages.add("process_model")
+    # After processing the model, go to the previous stage
+    stages.get("process_model").next = stages.get("process_model").previous
+    return stages
+
def parse_matrices(infile: SourceFile) -> List[Tuple[Model, Dict]]:
-    solutions = [] # Reset
+    solutions = []
    header = parse_header(infile)
+    stages = derive_stages(header)
+    first_run = True
    current_model_parts = ModelBuilder()
-    process_sizes(infile, header, current_model_parts, solutions)
+    stage = stages.first_stage
+    while True:
+        match stage.name:
+            case "end":
+                break
+            case "process_model":
+                process_model(stages.name(), current_model_parts, solutions)
+                stage = stage.next
+            case "size":
+                processed = process_sizes(infile, current_model_parts, first_run)
+                first_run = False
+                if processed:
+                    stage.num = current_model_parts.size + 1
+                    stage = stage.next
+                else:
+                    stages.reset_after(stage.name)
+                    stage = stage.previous
+            case "negation":
+                processed = process_negations(infile, current_model_parts)
+                if processed:
+                    stage.increment()
+                    stage = stage.next
+                else:
+                    stages.reset_after(stage.name)
+                    stage = stage.previous
+            case "order":
+                processed = process_orders(infile, current_model_parts)
+                if processed:
+                    stage.increment()
+                    stage = stage.next
+                else:
+                    stages.reset_after(stage.name)
+                    stage = stage.previous
+            case "designated":
+                processed = process_designateds(infile, current_model_parts)
+                if processed:
+                    stage.increment()
+                    stage = stage.next
+                else:
+                    stages.reset_after(stage.name)
+                    stage = stage.previous
+            case "implication":
+                processed = process_implications(infile, current_model_parts)
+                if processed:
+                    stage.increment()
+                    stage = stage.next
+                else:
+                    stages.reset_after(stage.name)
+                    stage = stage.previous
+            case "necessitation":
+                processed = process_necessitations(infile, current_model_parts)
+                if processed:
+                    stage.increment()
+                    stage = stage.next
+                else:
+                    stages.reset_after(stage.name)
+                    stage = stage.previous
+            case _:
+                custom_stage = re.search(r"custom--(\d+)--(\S+)", stage.name)
+                if custom_stage is None or len(custom_stage.groups()) != 2:
+                    raise NotImplementedError(f"Unrecognized Stage: {stage.name}")
+                adicity, symbol = custom_stage.groups()
+                adicity = int(adicity)
+                processed = process_custom_connective(infile, symbol, adicity, current_model_parts)
+                if processed:
+                    stage.increment()
+                    stage = stage.next
+                else:
+                    stages.reset_after(stage.name)
+                    stage = stage.previous
    return solutions

-def process_sizes(infile: SourceFile, header: UglyHeader, current_model_parts: ModelBuilder, solutions: List[Tuple[Model, Dict]]):
-    """Stage 1"""
-    first_run = True
-    while True:
-        print("Processing next size")
-        try:
-            size = parse_size(infile, first_run)
-            first_run = False
-        except StopIteration:
-            # For some reason, when necessitation is enabled this doesn't
-            # have a -1 on the last line
-            break
-        if size is None:
-            break
-        carrier_set = carrier_set_from_size(size)
-        current_model_parts.size = size
-        current_model_parts.carrier_set = carrier_set
-        process_negations(infile, header, current_model_parts, solutions)
+def process_sizes(infile: SourceFile, current_model_parts: ModelBuilder, first_run: bool) -> bool:
+    try:
+        size = parse_size(infile, first_run)
+    except StopIteration:
+        return False
+    if size is None:
+        return False
+
+    carrier_set = carrier_set_from_size(size)
+    current_model_parts.carrier_set = carrier_set
+    current_model_parts.size = size
+    return True

-def process_negations(infile: SourceFile, header: UglyHeader, current_model_parts: ModelBuilder, solutions: List[Tuple[Model, Dict]]):
+def process_negations(infile: SourceFile, current_model_parts: ModelBuilder) -> bool:
    """Stage 2 (Optional)"""
-    num_negation = 0
-    while True:
-        print("Processing next negation")
-        mnegation = None
-        if header.negation:
-            mnegation = parse_single_negation(infile, current_model_parts.size)
-            if mnegation is None:
-                break
-            num_negation += 1
-        current_model_parts.num_negation = num_negation
-        current_model_parts.mnegation = mnegation
-        process_orders(infile, header, current_model_parts, solutions)
-        if not header.negation:
-            break
+    mnegation = parse_single_monadic_connective(infile, "¬", current_model_parts.size)
+    if mnegation is None:
+        return False
+    current_model_parts.mnegation = mnegation
+    return True

-def process_orders(infile: SourceFile, header: UglyHeader, current_model_parts: ModelBuilder, solutions: List[Tuple[Model, Dict]]):
+def process_orders(infile: SourceFile, current_model_parts: ModelBuilder) -> bool:
    """Stage 3"""
-    num_order = 0
-    while True:
-        print("Processing next order")
-        result = parse_single_order(infile, current_model_parts.size)
-        if result is None:
-            break
-        num_order += 1
-        mconjunction, mdisjunction = result
-        current_model_parts.num_order = num_order
-        current_model_parts.mconjunction = mconjunction
-        current_model_parts.mdisjunction = mdisjunction
-        process_designateds(infile, header, current_model_parts, solutions)
+    result = parse_single_order(infile, current_model_parts.size)
+    if result is None:
+        return False
+    mconjunction, mdisjunction = result
+    current_model_parts.mconjunction = mconjunction
+    current_model_parts.mdisjunction = mdisjunction
+    return True

-def process_designateds(infile: SourceFile, header: UglyHeader, current_model_parts: ModelBuilder, solutions: List[Tuple[Model, Dict]]):
+def process_designateds(infile: SourceFile, current_model_parts: ModelBuilder) -> bool:
    """Stage 4"""
-    num_designated = 0
-    while True:
-        print("Processing next designated")
-        designated_values = parse_single_designated(infile, current_model_parts.size)
-        if designated_values is None:
-            break
-        num_designated += 1
-        current_model_parts.num_designated = num_designated
-        current_model_parts.designated_values = designated_values
-        process_implications(infile, header, current_model_parts, solutions)
+    designated_values = parse_single_designated(infile, current_model_parts.size)
+    if designated_values is None:
+        return False
+    current_model_parts.designated_values = designated_values
+    return True

-def process_implications(
-    infile: SourceFile, header: UglyHeader, current_model_parts: ModelBuilder, solutions: List[Tuple[Model, Dict]]):
+def process_implications(infile: SourceFile, current_model_parts: ModelBuilder) -> bool:
    """Stage 5"""
-    if header.necessitation:
-        num_implication = 0
-        while True:
-            print("Processing next implication")
-            instr = next(infile).strip()
-            mimplication, reststr = parse_single_implication(instr, infile.current_line, current_model_parts.size)
-            if mimplication is None:
-                break
-            num_implication += 1
-            current_model_parts.num_implication = num_implication
-            current_model_parts.mimplication = mimplication
-            process_necessitations(infile, reststr, header, current_model_parts, solutions)
-    else:
-        results = parse_implications(infile, current_model_parts.size)
-        for num_implication, mimplication in enumerate(results, 1):
-            current_model_parts.num_implication = num_implication
-            current_model_parts.mimplication = mimplication
-            process_model(current_model_parts, solutions)
+    mimplication = parse_single_dyadic_connective(infile, "→", current_model_parts.size)
+    if mimplication is None:
+        return False
+    current_model_parts.mimplication = mimplication
+    return True

-def process_necessitations(infile: SourceFile, instr: str, header: UglyHeader, current_model_parts: ModelBuilder, solutions: List[Tuple[Model, Dict]]):
-    # NOTE: For some reason, one necessitation table will be on the same line as the implication table
-    mnecessitation = parse_single_necessitation_from_str(instr, infile.current_line, current_model_parts.size)
-    assert mnecessitation is not None, f"Expected Necessitation Table at line {infile.current_line}"
-    num_necessitation = 1
-    current_model_parts.num_necessitation = num_necessitation
-    current_model_parts.mnecessitation = mnecessitation
-    process_model(current_model_parts, solutions)
-    while True:
-        print("Processing next necessitation")
-        mnecessitation = parse_single_necessitation(infile, current_model_parts.size)
-        if mnecessitation is None:
-            break
-        num_necessitation += 1
-        current_model_parts.num_necessitation = num_necessitation
-        current_model_parts.mnecessitation = mnecessitation
-        process_model(current_model_parts, solutions)
+def process_necessitations(infile: SourceFile, current_model_parts: ModelBuilder) -> bool:
+    mnecessitation = parse_single_monadic_connective(infile, "!", current_model_parts.size)
+    if mnecessitation is None:
+        return False
+    current_model_parts.mnecessitation = mnecessitation
+    return True
+
+def process_custom_connective(infile: SourceFile, symbol: str, adicity: int, current_model_parts: ModelBuilder) -> bool:
+    if adicity == 0:
+        mfunction = parse_single_nullary_connective(infile, symbol)
+    elif adicity == 1:
+        mfunction = parse_single_monadic_connective(infile, symbol, current_model_parts.size)
+    elif adicity == 2:
+        mfunction = parse_single_dyadic_connective(infile, symbol, current_model_parts.size)
+    else:
+        raise NotImplementedError("Unable to process connectives of adicity greater than 2")
+    if mfunction is None:
+        return False
+    current_model_parts.custom_model_functions[symbol] = mfunction
+    return True

-def process_model(mp: ModelBuilder, solutions: List[Tuple[Model, Dict]]):
+def process_model(model_name: str, mp: ModelBuilder, solutions: List[Tuple[Model, Dict]]):
    """Create Model"""
-    assert mp.size > 0
-    assert mp.size + 1 == len(mp.carrier_set)
-    assert len(mp.designated_values) <= len(mp.carrier_set)
    assert mp.mimplication is not None
+    assert len(mp.carrier_set) > 0

    logical_operations = { mp.mimplication }
-    model_name = f"{mp.size}{'.' + str(mp.num_negation) if mp.num_negation != 0 else ''}.{mp.num_order}.{mp.num_designated}.{mp.num_implication}{'.' + str(mp.num_necessitation) if mp.num_necessitation != 0 else ''}"
    model = Model(mp.carrier_set, logical_operations, mp.designated_values, name=model_name)
    interpretation = {
        Implication: mp.mimplication
@@ -193,23 +351,34 @@ def process_model(mp: ModelBuilder, solutions: List[Tuple[Model, Dict]]):
        logical_operations.add(mp.mnecessitation)
        interpretation[Necessitation] = mp.mnecessitation

+    for custom_mf in mp.custom_model_functions.values():
+        if custom_mf is not None:
+            logical_operations.add(custom_mf)
+            op = Operation(custom_mf.operation_name, custom_mf.arity)
+            interpretation[op] = custom_mf
+
    solutions.append((model, interpretation))
+    print(f"Parsed Matrix {model.name}")

def parse_header(infile: SourceFile) -> UglyHeader:
    """
    Parse the header line from the ugly data format.
-    NOTE: Currently Incomplete.
    """
-    header_line = next(infile).strip()
+    header_line = infile.next_line()
    header_tokens = header_line.split(" ")
    assert header_tokens[0] in ["0", "1"]
    assert header_tokens[6] in ["0", "1"]
+    assert len(header_tokens) >= 7
    negation_defined = bool(int(header_tokens[0]))
    necessitation_defined = bool(int(header_tokens[6]))
-    return UglyHeader(negation_defined, necessitation_defined)
+    num_custom_connectives = int(header_tokens[7])
+    custom_model_functions: List[Tuple[int, str]] = []
+    for i in range(num_custom_connectives):
+        arity = int(header_tokens[7 + (2 * i) + 1])
+        symbol = header_tokens[7 + (2 * i) + 2]
+        custom_model_functions.append((arity, symbol))
+    return UglyHeader(negation_defined, necessitation_defined, custom_model_functions)

-def carrier_set_from_size(size: int):
+def carrier_set_from_size(size: int) -> Set[ModelValue]:
    """
    Construct a carrier set of model values
    based on the desired size.
@@ -222,37 +391,21 @@ def parse_size(infile: SourceFile, first_run: bool) -> Optional[int]:
    """
    Parse the line representing the matrix size.
    """
-    size = int(next(infile))
-    # HACK: The first size line may be -1 due to a bug. Skip it
-    if size == -1 and first_run:
-        size = int(next(infile))
+    size = int(infile.next_line())
+
+    # HACK: When necessitation and custom connectives are enabled
+    # MaGIC may produce -1s at the beginning of the file
+    if first_run:
+        while size == -1:
+            size = int(infile.next_line())
+
    if size == -1:
        return None
    assert size > 0, f"Unexpected size at line {infile.current_line}"
    return size

-def parse_single_negation(infile: SourceFile, size: int) -> Optional[ModelFunction]:
-    """
-    Parse the line representing the negation table.
-    """
-    line = next(infile).strip()
-    if line == '-1':
-        return None
-
-    row = line.split(" ")
-    assert len(row) == size + 1, f"Negation table doesn't match size at line {infile.current_line}"
-    mapping = {}
-
-    for i, j in zip(range(size + 1), row):
-        x = mvalue_from_index(i)
-        y = parse_mvalue(j)
-        mapping[(x, )] = y
-
-    return ModelFunction(1, mapping, "¬")
-
-def mvalue_from_index(i: int):
+def mvalue_from_index(i: int) -> ModelValue:
    """
    Given an index, return the
    representation of the model value.
@@ -317,7 +470,7 @@ def parse_single_order(infile: SourceFile, size: int) -> Optional[Tuple[ModelFun
    """
    Parse the line representing the ordering table
    """
-    line = next(infile).strip()
+    line = infile.next_line()
    if line == '-1':
        return None
@@ -338,7 +491,6 @@ def parse_single_order(infile: SourceFile, size: int) -> Optional[Tuple[ModelFun
    cmapping = {}
    dmapping = {}
-
    for i in range(size + 1):
        x = mvalue_from_index(i)
        for j in range(size + 1):
@@ -368,7 +520,7 @@ def parse_single_designated(infile: SourceFile, size: int) -> Optional[Set[Model
    """
    Parse the line representing which model values are designated.
    """
-    line = next(infile).strip()
+    line = infile.next_line()
    if line == '-1':
        return None
@@ -385,17 +537,46 @@ def parse_single_designated(infile: SourceFile, size: int) -> Optional[Set[Model
    return designated_values

-def parse_single_implication(instr: str, line: int, size: int) -> Tuple[ModelFunction, str]:
-    """
-    Take the current string, parse an implication table from it,
-    and return along with it the remainder of the string
-    """
-    if instr == "-1":
-        return None, ""
-
-    table = instr.split(" ")
-
-    assert len(table) >= (size + 1)**2, f"Implication table does not match expected size at line {line}"
+def parse_single_nullary_connective(infile: SourceFile, symbol: str) -> Optional[ModelFunction]:
+    line = infile.next_line()
+    if line == "-1":
+        return None
+
+    row = line.split(" ")
+    assert len(row) == 1, f"More than one assignment for a nullary connective was provided at line {infile.current_line}"
+    mapping = {}
+    mapping[()] = parse_mvalue(row[0])
+    return ModelFunction(0, mapping, symbol)
+
+def parse_single_monadic_connective(infile: SourceFile, symbol: str, size: int) -> Optional[ModelFunction]:
+    line = infile.next_line()
+    if line == '-1':
+        return None
+
+    row = line.split(" ")
+    assert len(row) == size + 1, f"{symbol} table doesn't match size at line {infile.current_line}"
+    mapping = {}
+
+    for i, j in zip(range(size + 1), row):
+        x = mvalue_from_index(i)
+        y = parse_mvalue(j)
+        mapping[(x, )] = y
+
+    return ModelFunction(1, mapping, symbol)
+
+def parse_single_dyadic_connective(infile: SourceFile, symbol: str, size: int) -> Optional[ModelFunction]:
+    first_token = next(infile)
+    if first_token == "-1":
+        return None
+
+    table = []
+    try:
+        table = [first_token] + [next(infile) for _ in range((size + 1)**2 - 1)]
+    except StopIteration:
+        pass
+
+    assert len(table) == (size + 1)**2, f"{symbol} table does not match expected size at line {infile.current_line}"

    mapping = {}
    table_i = 0
@@ -409,81 +590,4 @@ def parse_single_implication(instr: str, line: int, size: int) -> Tuple[ModelFun
            mapping[(x, y)] = r

-    mimplication = ModelFunction(2, mapping, "→")
-    reststr = " ".join(table[(size + 1)**2:])
-    return mimplication, reststr
+    return ModelFunction(2, mapping, symbol)

-def parse_implications(infile: SourceFile, size: int) -> List[ModelFunction]:
-    """
-    Parse the line representing the list of implication
-    tables.
-    """
-    line = next(infile).strip()
-    # Split and remove the last '-1' character
-    table = line.split(" ")[:-1]
-
-    assert len(table) % (size + 1)**2 == 0, f"Implication table does not match expected size at line {infile.current_line}"
-
-    table_i = 0
-    mimplications: List[ModelFunction] = []
-
-    for _ in range(len(table) // (size + 1)**2):
-        mapping = {}
-        for i in range(size + 1):
-            x = mvalue_from_index(i)
-            for j in range(size + 1):
-                y = mvalue_from_index(j)
-                r = parse_mvalue(table[table_i])
-                table_i += 1
-                mapping[(x, y)] = r
-        mimplication = ModelFunction(2, mapping, "→")
-        mimplications.append(mimplication)
-
-    return mimplications
-
-def parse_single_necessitation_from_str(instr: str, line: int, size: int) -> Optional[ModelFunction]:
-    """
-    Parse the line representing the necessitation table.
-    """
-    if instr == "-1":
-        return None
-
-    row = instr.split(" ")
-    assert len(row) == size + 1, f"Necessitation table doesn't match size at line {line}"
-    mapping = {}
-
-    for i, j in zip(range(size + 1), row):
-        x = mvalue_from_index(i)
-        y = parse_mvalue(j)
-        mapping[(x, )] = y
-
-    return ModelFunction(1, mapping, "!")
-
-def parse_single_necessitation(infile: SourceFile, size: int) -> Optional[ModelFunction]:
-    line = next(infile).strip()
-    return parse_single_necessitation_from_str(line, infile.current_line, size)
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description="VSP Checker")
-    parser.add_argument("--verbose", action='store_true', help="Print out all parsed matrices")
-    args = vars(parser.parse_args())
-
-    solutions = parse_matrices(SourceFile(sys.stdin))
-    print(f"Parsed {len(solutions)} matrices")
-    num_has_vsp = 0
-    for i, (model, interpretation) in enumerate(solutions):
-        vsp_result = has_vsp(model, interpretation)
-        print(vsp_result)
-
-        if args['verbose'] or vsp_result.has_vsp:
-            print(model)
-
-        if vsp_result.has_vsp:
-            num_has_vsp += 1
-
-    print(f"Tested {len(solutions)} models, {num_has_vsp} of which satisfy VSP")

vsp.py

@@ -8,7 +8,7 @@ from common import set_to_str
from model import (
    Model, model_closure, ModelFunction, ModelValue
)
-from logic import Implication, Operation
+from logic import Conjunction, Disjunction, Implication, Operation

def preseed(
    initial_set: Set[ModelValue],
@@ -38,6 +38,40 @@ def preseed(
    same_set = candidate_preseed[1] == 0
    return candidate_preseed[0], same_set

+def find_top(algebra: Set[ModelValue], mconjunction: Optional[ModelFunction], mdisjunction: Optional[ModelFunction]) -> Optional[ModelValue]:
+    """
+    Find the top of the order lattice.
+    T || a = T, T && a = a for all a in the carrier set
+    """
+    if mconjunction is None or mdisjunction is None:
+        return None
+
+    for x in algebra:
+        for y in algebra:
+            if mdisjunction(x, y) == x and mconjunction(x, y) == y:
+                return x
+
+    print("[Warning] Failed to find the top of the lattice")
+    return None
+
+def find_bottom(algebra: Set[ModelValue], mconjunction: Optional[ModelFunction], mdisjunction: Optional[ModelFunction]) -> Optional[ModelValue]:
+    """
+    Find the bottom of the order lattice.
+    F || a = a, F && a = F for all a in the carrier set
+    """
+    if mconjunction is None or mdisjunction is None:
+        return None
+
+    for x in algebra:
+        for y in algebra:
+            if mdisjunction(x, y) == y and mconjunction(x, y) == x:
+                return x
+
+    print("[Warning] Failed to find the bottom of the lattice")
+    return None
+
class VSP_Result:
    def __init__(
        self, has_vsp: bool, model_name: Optional[str] = None,
@@ -62,6 +96,10 @@ def has_vsp(model: Model, interpretation: Dict[Operation, ModelFunction]) -> VSP
    sharing property.
    """
    impfunction = interpretation[Implication]
+    mconjunction = interpretation.get(Conjunction)
+    mdisjunction = interpretation.get(Disjunction)
+    top = find_top(model.carrier_set, mconjunction, mdisjunction)
+    bottom = find_bottom(model.carrier_set, mconjunction, mdisjunction)

    # NOTE: No models with only one designated
    # value satisfies VSP
@@ -101,28 +139,45 @@ def has_vsp(model: Model, interpretation: Dict[Operation, ModelFunction]) -> VSP
        # NOTE: Optimization before model_closure
-        # If the carrier set intersects, then move on to the next
-        # subalgebra
+        # If the two subalgebras intersect, move
+        # onto the next pair
        if len(xs & ys) > 0:
            continue

+        # NOTE: Optimization
+        # If either subalgebra contains top or bottom, move
+        # onto the next pair
+        if top is not None and (top in xs or top in ys):
+            continue
+        if bottom is not None and (bottom in xs or bottom in ys):
+            continue
+
        # Compute the closure of all operations
        # with just the xs
-        carrier_set_left: Set[ModelValue] = model_closure(xs, model.logical_operations)
+        carrier_set_left: Set[ModelValue] = model_closure(xs, model.logical_operations, top, bottom)

        # Save to cache
        if cached_xs[0] is not None and not cached_ys[1]:
            closure_cache.append((orig_xs, carrier_set_left))

+        if top is not None and top in carrier_set_left:
+            continue
+        if bottom is not None and bottom in carrier_set_left:
+            continue
+
        # Compute the closure of all operations
        # with just the ys
-        carrier_set_right: Set[ModelValue] = model_closure(ys, model.logical_operations)
+        carrier_set_right: Set[ModelValue] = model_closure(ys, model.logical_operations, top, bottom)

        # Save to cache
        if cached_ys[0] is not None and not cached_ys[1]:
            closure_cache.append((orig_ys, carrier_set_right))

+        if top is not None and top in carrier_set_right:
+            continue
+        if bottom is not None and bottom in carrier_set_right:
+            continue
+
        # If the carrier set intersects, then move on to the next
        # subalgebra
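For intuition about the identities find_top and find_bottom look for, here is a self-contained check on the two-element Boolean lattice; meet and join below are stand-ins for the parsed mconjunction and mdisjunction, not repository code:

```
# Sketch: the lattice identities behind find_top/find_bottom, checked
# exhaustively on the two-element lattice {0, 1}.
def meet(x: int, y: int) -> int:   # plays the role of mconjunction (&&)
    return min(x, y)

def join(x: int, y: int) -> int:   # plays the role of mdisjunction (||)
    return max(x, y)

algebra = {0, 1}

# Top T satisfies: T || a == T and T && a == a for every a.
top = next(x for x in algebra
           if all(join(x, a) == x and meet(x, a) == a for a in algebra))
# Bottom F satisfies: F || a == a and F && a == F for every a.
bottom = next(x for x in algebra
              if all(join(x, a) == a and meet(x, a) == x for a in algebra))

print(top, bottom)  # prints: 1 0
```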

vspursuer.py (new executable file)

@@ -0,0 +1,35 @@
+#!/usr/bin/env python3
+import argparse
+
+from parse_magic import (
+    SourceFile,
+    parse_matrices
+)
+from vsp import has_vsp
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description="VSP Checker")
+    parser.add_argument("--verbose", action='store_true', help="Print out all parsed matrices")
+    parser.add_argument("-i", type=str, help="Path to MaGIC ugly data file")
+    args = vars(parser.parse_args())
+
+    data_file_path = args.get("i")
+    if data_file_path is None:
+        data_file_path = input("Path to MaGIC Ugly Data File: ")
+
+    solutions = []
+    with open(data_file_path, "r") as data_file:
+        solutions = parse_matrices(SourceFile(data_file))
+    print(f"Parsed {len(solutions)} matrices")
+
+    num_has_vsp = 0
+    for i, (model, interpretation) in enumerate(solutions):
+        vsp_result = has_vsp(model, interpretation)
+        print(vsp_result)
+
+        if args['verbose'] or vsp_result.has_vsp:
+            print(model)
+
+        if vsp_result.has_vsp:
+            num_has_vsp += 1
+
+    print(f"Tested {len(solutions)} models, {num_has_vsp} of which satisfy VSP")