mirror of https://git.cs.ou.nl/joshua.moerman/mealy-decompose.git

Greedily merge components (works, but unfortunately no better solution found yet)

Joshua Moerman 2023-11-29 16:36:12 +01:00
parent 18e0b2f4d6
commit e20251c07d
4 changed files with 65 additions and 10 deletions

app/Main.hs

@@ -3,16 +3,18 @@ module Main where
import DotParser
import Mealy
import MealyRefine
import Partition
-- import Control.Monad.IO.Class (liftIO)
-- import Control.Monad.Trans.State.Strict
import Control.Monad (forM_)
-- import Data.Map.Strict qualified as Map
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.State.Strict
import Control.Monad (forM_, when, forever)
import Data.Map.Strict qualified as Map
import Data.Maybe (mapMaybe)
import Data.Partition (isRefinementOf, numBlocks)
-- import Data.Semigroup (Arg(..))
-- import Data.Set qualified as Set
-- import Data.List.Ordered (nubSort)
import Data.List (minimumBy)
import Data.Function (on)
import System.Environment
import Text.Megaparsec
@@ -70,6 +72,29 @@ main = do
printPartition partition
)
let totalSize = sum (fmap (numBlocks . snd) projections)
putStrLn $ "total size = " <> show totalSize
let score p1 p2 p3 = numBlocks p3 - numBlocks p2 - numBlocks p1
    combine (o1, p1) (o2, p2) = let p3 = commonRefinement p1 p2 in ((o1, o2, p3), score p1 p2 p3)
    allCombs projs = [combine op1 op2 | op1 <- projs, op2 <- projs, fst op1 < fst op2]
    minComb projs = minimumBy (compare `on` snd) (allCombs projs)

_ <- flip execStateT (Map.fromList projections, totalSize) $ forever (do
  (projmap, currentSize) <- get
  liftIO . print . fmap numBlocks . Map.elems $ projmap
  let ((o1, o2, p3), gain) = minComb (Map.assocs projmap)
      o3 = o1 <> "x" <> o2
      newSize = currentSize + gain
      newProjmap = Map.insert o3 p3 . Map.delete o2 . Map.delete o1 $ projmap
  liftIO $ putStrLn (show o3 <> " -> " <> show newSize)
  put (newProjmap, newSize)
  )

print "done"
{-
-- Check refinement relations for all pairs
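The loop added above greedily merges, at every step, the pair of output components whose common refinement costs the fewest extra blocks (the score/combine/minComb definitions). The sketch below is not taken from the repository: it replays the same greedy logic on a plain Map Int Int state-to-block representation instead of copar's Partition, and it simply recurses until one component is left instead of tracking the running size in a StateT the way the committed code does. All names in it (Part, blocks, refineBoth, greedy) are made up for the illustration.

module Main where

import Data.Function (on)
import Data.List (minimumBy)
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set

-- A partition, represented as a map from state index to block label.
type Part = Map.Map Int Int

-- Number of blocks of a partition.
blocks :: Part -> Int
blocks = Set.size . Set.fromList . Map.elems

-- Coarsest partition finer than both inputs: two states share a block in the
-- result exactly when they share a block in p1 and also in p2.
refineBoth :: Part -> Part -> Part
refineBoth p1 p2 =
  let pairs   = Map.intersectionWith (,) p1 p2
      relabel = Map.fromList (zip (Set.toList (Set.fromList (Map.elems pairs))) [0 ..])
  in  Map.map (\k -> relabel Map.! k) pairs

-- Repeatedly merge the pair whose combined partition grows the least
-- (the same scoring as above), until a single component remains.
greedy :: Map.Map String Part -> Map.Map String Part
greedy projs
  | Map.size projs <= 1 = projs
  | otherwise =
      let candidates =
            [ ((n1, n2, p3), blocks p3 - blocks p1 - blocks p2)
            | (n1, p1) <- Map.assocs projs
            , (n2, p2) <- Map.assocs projs
            , n1 < n2
            , let p3 = refineBoth p1 p2
            ]
          ((n1, n2, p3), _) = minimumBy (compare `on` snd) candidates
      in  greedy (Map.insert (n1 <> "x" <> n2) p3 (Map.delete n1 (Map.delete n2 projs)))

main :: IO ()
main = do
  -- Three toy 4-state components.
  let a = Map.fromList [(0, 0), (1, 0), (2, 1), (3, 1)]
      b = Map.fromList [(0, 0), (1, 1), (2, 0), (3, 1)]
      c = Map.fromList [(0, 0), (1, 0), (2, 0), (3, 1)]
  print (Map.map blocks (greedy (Map.fromList [("a", a), ("b", b), ("c", c)])))
  -- prints: fromList [("axcxb",4)] -- "a" and "c" are merged first (cheapest pair)

In this toy run the merge order is decided by the score rather than by input order, which is the point of the greedy heuristic the commit introduces.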

mealy-decompose.cabal

@@ -14,7 +14,8 @@ common stuff
containers,
copar,
data-ordlist,
megaparsec
megaparsec,
transformers
default-language: GHC2021
default-extensions:
RecordWildCards
@@ -26,7 +27,8 @@ library
exposed-modules:
DotParser,
Mealy,
MealyRefine
MealyRefine,
Partition
build-depends:
vector
@@ -35,8 +37,7 @@ executable mealy-decompose
hs-source-dirs: app
main-is: Main.hs
build-depends:
mealy-decompose,
transformers
mealy-decompose
test-suite mealy-decompose-test
import: stuff

src/MealyRefine.hs

@@ -1,6 +1,7 @@
module MealyRefine where
import Mealy
import Partition (Partition)
import Control.Monad.ST (runST)
import Copar.Algorithm (refine)
@@ -9,7 +10,6 @@ import Copar.RefinementInterface (Label, F1)
import Data.Bool (bool)
import Data.CoalgebraEncoding (Encoding(..))
import Data.Map.Strict qualified as Map
import Data.Partition (Partition)
import Data.Proxy (Proxy(..))
import Data.Vector qualified
import Data.Vector.Unboxed qualified

src/Partition.hs (new file, 29 additions)

@@ -0,0 +1,29 @@
module Partition
  ( module Partition
  , module Data.Partition
  ) where

import Control.Monad.Trans.State.Strict (runState, get, put)
import Data.Partition (Partition(..), isRefinementOf, numStates)
import Data.Vector qualified as V
import Data.Map.Strict qualified as Map
import Unsafe.Coerce (unsafeCoerce)

-- Returns the coarsest partition which is finer than either input
-- i.e., the greatest lower bound
commonRefinement :: Partition -> Partition -> Partition
commonRefinement p1 p2 =
  let n = numStates p1
      sa1 = (stateAssignment p1 V.!)
      sa2 = (stateAssignment p2 V.!)
      blockAtIdx i = do
        (m, b) <- get
        let key = (sa1 i, sa2 i)
        case Map.lookup key m of
          Just b0 -> return b0
          Nothing -> do
            put (Map.insert key b m, succ b)
            return b
      (vect, (_, nextBlock)) = runState (V.generateM n blockAtIdx) (Map.empty, 0)
  in Partition { numBlocks = unsafeCoerce nextBlock, stateAssignment = vect }
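For intuition about what commonRefinement computes, here is the same pairing idea on plain block-label lists. This is a standalone illustration, not the module above: it ignores copar's vector-backed Partition type and just relabels the pair of block labels of each state, which is what the State-and-Map loop above does while generating the assignment vector. The module name GlbSketch and the function glb exist only for this sketch.

module GlbSketch where

import Data.List (nub)
import Data.Maybe (fromMaybe)

-- A partition given as one block label per state: [0,0,1,1] puts states
-- 0 and 1 in one block and states 2 and 3 in another.
glb :: [Int] -> [Int] -> [Int]
glb xs ys =
  let keys  = zip xs ys               -- the pair of block labels of each state
      table = zip (nub keys) [0 ..]   -- a fresh label for every distinct pair
  in  map (\k -> fromMaybe (error "unreachable") (lookup k table)) keys

-- glb [0,0,1,1] [0,1,0,1] == [0,1,2,3]   (orthogonal inputs split every state apart)
-- glb [0,0,1,1] [0,0,0,1] == [0,0,1,2]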