diff --git a/.gitignore b/.gitignore index c0bae7176..dfc643419 100644 --- a/.gitignore +++ b/.gitignore @@ -32,3 +32,4 @@ out/ *.zo __pycache__ stats/ +res.json diff --git a/build.sbt b/build.sbt index 6d10e4b5d..ffcbce34f 100644 --- a/build.sbt +++ b/build.sbt @@ -23,6 +23,8 @@ lazy val maf = crossProject(JVMPlatform, JSPlatform) libraryDependencies += ("com.typesafe.akka" %% "akka-actor-typed" % "2.6.18").cross(CrossVersion.for3Use2_13), libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.2.10", libraryDependencies += "com.typesafe" % "config" % "1.4.1", + libraryDependencies += "io.bullet" %% "borer-core" % "1.10.0", + libraryDependencies += "io.bullet" %% "borer-derivation" % "1.10.0", /** Compilation options */ maxErrors := 5, /** Configuration for running the tests */ diff --git a/code/jvm/src/main/scala/maf/cli/experiments/SchemeAnalyses.scala b/code/jvm/src/main/scala/maf/cli/experiments/SchemeAnalyses.scala index b83db9afb..486d41702 100644 --- a/code/jvm/src/main/scala/maf/cli/experiments/SchemeAnalyses.scala +++ b/code/jvm/src/main/scala/maf/cli/experiments/SchemeAnalyses.scala @@ -21,6 +21,9 @@ import maf.modular.scheme.modactor.mirrors.ModActorWithMirrors import maf.modular.scheme.modactor.mirrors.SimpleModActorWithMirrors import maf.language.racket.RacketLoaderSemantics import maf.language.racket.RacketLoader +import maf.save.LoadFIFOWorklist +import maf.save.LoadModF +import maf.save.SaveModF object SchemeAnalysesBoundedDomain: object NoSensitivity: @@ -55,8 +58,15 @@ object SchemeAnalyses: def contextInsensitiveAnalysis( prg: SchemeExp - ) = new SimpleSchemeModFAnalysis(prg) with SchemeModFNoSensitivity with SchemeConstantPropagationDomain with FIFOWorklistAlgorithm[SchemeExp] { + ) = new SimpleSchemeModFAnalysis(prg) + with SchemeModFNoSensitivity + with SchemeConstantPropagationDomain + with FIFOWorklistAlgorithm[SchemeExp] + with SaveModF + with LoadModF + with LoadFIFOWorklist[SchemeExp] { override def toString = "no-sensitivity" + override val analysisName: String = "modf" } //def contextInsensitiveAnalysisRacket( diff --git a/code/jvm/src/main/scala/maf/cli/experiments/performance/PersistencePerformance.scala b/code/jvm/src/main/scala/maf/cli/experiments/performance/PersistencePerformance.scala new file mode 100644 index 000000000..7d3780a16 --- /dev/null +++ b/code/jvm/src/main/scala/maf/cli/experiments/performance/PersistencePerformance.scala @@ -0,0 +1,370 @@ +package maf.cli.experiments.performance + +import maf.cli.experiments.SchemeAnalyses +import maf.cli.experiments.performance.PerformanceEvaluationJMHPersistence.AnalyzedProgram +import maf.cli.experiments.performance.PerformanceEvaluationJMHPersistence.Program +import maf.cli.experiments.performance.PerformanceEvaluationJMHPersistence.ProgramPath +import maf.cli.experiments.performance.PerformanceEvaluationJMHPersistence.SavedProgram +import maf.cli.experiments.performance.PerformanceEvaluationJMHPersistence.testSizeFile +import maf.core.Expression +import maf.language.CScheme.CSchemeParser +import maf.language.scheme.SchemeExp +import maf.modular.AnalysisEntry +import maf.modular.ModAnalysis +import maf.modular.scheme.SchemeConstantPropagationDomain +import maf.modular.scheme.modf.SchemeModFNoSensitivity +import maf.modular.scheme.modf.SimpleSchemeModFAnalysis +import maf.modular.worklist.FIFOWorklistAlgorithm +import maf.save.Load +import maf.save.LoadActualComponents +import maf.save.LoadActualExpressions +import maf.save.LoadAddrDependency +import maf.save.LoadComponentIntID +import 
maf.save.LoadComponents +import maf.save.LoadDependency +import maf.save.LoadExpressionIntID +import maf.save.LoadFIFOWorklist +import maf.save.LoadGlobalStore +import maf.save.LoadInitialized +import maf.save.LoadModularSchemeDomain +import maf.save.LoadNoContext +import maf.save.LoadSchemeAddr +import maf.save.LoadSchemeConstantPropagationDomain +import maf.save.LoadSchemeExpressions +import maf.save.LoadStandardSchemeComponentPosition +import maf.save.LoadStandardSchemeComponents +import maf.save.LoadWorklist +import maf.save.Save +import maf.save.SaveActualComponents +import maf.save.SaveAddrDep +import maf.save.SaveComponentIntID +import maf.save.SaveComponents +import maf.save.SaveDependency +import maf.save.SaveGlobalStore +import maf.save.SaveInitialized +import maf.save.SaveModularSchemeDomain +import maf.save.SaveNoContext +import maf.save.SaveSchemeAddr +import maf.save.SaveSchemeConstantPropagationDomain +import maf.save.SaveSequentialWorklist +import maf.save.SaveStandardSchemeComponentPosition +import maf.save.SaveStandardSchemeComponents +import maf.save.SaveWorklist +import maf.save.save.SaveActualExpressions +import maf.save.save.SaveExpressionID +import maf.save.save.SaveExpressions +import maf.save.save.SaveRecursiveSchemeExpressionsIntID +import maf.save.save.SaveSchemeExpressions +import maf.save.save.SaveWorklistExpressionsID +import maf.util.Reader +import org.openjdk.jmh.annotations.* +import org.openjdk.jmh.infra.BenchmarkParams +import org.openjdk.jmh.infra.BenchmarkParamsL2 +import org.openjdk.jmh.infra.IterationParams +import org.openjdk.jmh.profile.ExternalProfiler +import org.openjdk.jmh.profile.InternalProfiler +import org.openjdk.jmh.profile.Profiler +import org.openjdk.jmh.results.AggregationPolicy +import org.openjdk.jmh.results.BenchmarkResult +import org.openjdk.jmh.results.BenchmarkResultMetaData +import org.openjdk.jmh.results.IterationResult +import org.openjdk.jmh.results.Result +import org.openjdk.jmh.results.ScalarResult +import org.openjdk.jmh.util.Multimap + +import java.io.File +import java.io.IOException +import java.nio.file.Files +import java.nio.file.Path +import java.nio.file.Paths +import java.nio.file.StandardCopyOption +import java.util.ArrayList +import java.util.Collection +import java.util.concurrent.TimeUnit +import maf.save.save.SaveExpressionIntID +import maf.save.SaveCbor +import maf.save.LoadCbor +import maf.save.save.SaveMainSchemeBody +import maf.save.LoadMainSchemeBody + +object PersistenceEvaluation: + trait SaveEvaluation[Expr <: Expression] + extends Save[Expr] + with SaveInitialized[Expr] + with SaveComponents[Expr] + with SaveWorklist[Expr] + with SaveGlobalStore[Expr] + with SaveDependency[Expr] + with SaveAddrDep[Expr] + + trait SaveModF + extends SaveEvaluation[SchemeExp] + with SaveStandardSchemeComponents + with SaveModularSchemeDomain + with SaveSchemeConstantPropagationDomain + with SaveSchemeAddr + with SaveSchemeExpressions + with SaveNoContext[SchemeExp] + with SaveSequentialWorklist[SchemeExp] + with SaveMainSchemeBody + + trait LoadEvaluation[Expr <: Expression] + extends Load[Expr] + with LoadInitialized[Expr] + with LoadComponents[Expr] + with LoadWorklist[Expr] + with LoadGlobalStore[Expr] + with LoadDependency[Expr] + with LoadAddrDependency[Expr] + + trait LoadModF + extends LoadEvaluation[SchemeExp] + with LoadStandardSchemeComponents + with LoadModularSchemeDomain + with LoadSchemeConstantPropagationDomain + with LoadSchemeAddr + with LoadFIFOWorklist[SchemeExp] + with LoadSchemeExpressions + with 
LoadNoContext[SchemeExp] + with LoadMainSchemeBody + + trait SimpleModF + extends SimpleSchemeModFAnalysis + with SchemeModFNoSensitivity + with SchemeConstantPropagationDomain + with FIFOWorklistAlgorithm[SchemeExp] + with SaveModF + with LoadModF + + class SimpleModFActual(program: SchemeExp) + extends SimpleSchemeModFAnalysis(program) + with SaveActualExpressions[SchemeExp] + with LoadActualExpressions[SchemeExp] + with SaveActualComponents[SchemeExp] + with LoadActualComponents[SchemeExp] + with SimpleModF + + class SimpleModFPositionComponents(program: SchemeExp) + extends SimpleSchemeModFAnalysis(program) + with SaveActualExpressions[SchemeExp] + with LoadActualExpressions[SchemeExp] + with SaveStandardSchemeComponentPosition + with LoadStandardSchemeComponentPosition + with SimpleModF + + class SimpleModFIDComponents(program: SchemeExp) + extends SimpleSchemeModFAnalysis(program) + with SaveActualExpressions[SchemeExp] + with LoadActualExpressions[SchemeExp] + with SaveComponentIntID[SchemeExp] + with LoadComponentIntID[SchemeExp] + with SimpleModF + + class SimpleModFIDExpressions(program: SchemeExp) + extends SimpleSchemeModFAnalysis(program) + with SaveExpressionIntID[SchemeExp] + with LoadExpressionIntID[SchemeExp] + with SaveActualComponents[SchemeExp] + with LoadActualComponents[SchemeExp] + with SimpleModF + + class SimpleModFIDSchemeExpressions(program: SchemeExp, override val maxASTHeight: Int) + extends SimpleSchemeModFAnalysis(program) + with SaveRecursiveSchemeExpressionsIntID + with LoadExpressionIntID[SchemeExp] + with SaveActualComponents[SchemeExp] + with LoadActualComponents[SchemeExp] + with SimpleModF + + class SimpleModFIDs(program: SchemeExp, override val maxASTHeight: Int) + extends SimpleSchemeModFAnalysis(program) + with SaveRecursiveSchemeExpressionsIntID + with LoadExpressionIntID[SchemeExp] + with SaveComponentIntID[SchemeExp] + with LoadComponentIntID[SchemeExp] + with SimpleModF + + class SimpleModFActualCbor(program: SchemeExp) extends SimpleModFActual(program) with SaveCbor[SchemeExp] with LoadCbor[SchemeExp] + class SimpleModFPositionComponentsCbor(program: SchemeExp) + extends SimpleModFPositionComponents(program) + with SaveCbor[SchemeExp] + with LoadCbor[SchemeExp] + class SimpleModFIDComponentsCbor(program: SchemeExp) extends SimpleModFIDComponents(program) with SaveCbor[SchemeExp] with LoadCbor[SchemeExp] + class SimpleModFIDExpressionsCbor(program: SchemeExp) extends SimpleModFIDExpressions(program) with SaveCbor[SchemeExp] with LoadCbor[SchemeExp] + class SimpleModFIDSchemeExpressionsCbor(program: SchemeExp, maxASTHeight: Int) + extends SimpleModFIDSchemeExpressions(program, maxASTHeight) + with SaveCbor[SchemeExp] + with LoadCbor[SchemeExp] + class SimpleModFIDsCbor(program: SchemeExp, maxASTHeight: Int) + extends SimpleModFIDs(program, maxASTHeight) + with SaveCbor[SchemeExp] + with LoadCbor[SchemeExp] + + final val simpleModFActual = "simpleModFActual" + final val simpleModFPositionComponents = "simpleModFPositionComponents" + final val simpleModFIDComponents = "simpleModFIDComponents" + final val simpleModFIDExpressions = "simpleModFIDExpressions" + final val simpleModFIDSchemeExpressions = "simpleModFIDSchemeExpressions" + final val simpleModFIDs = "simpleModFIDs" + +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@BenchmarkMode(Array(Mode.AverageTime)) +class PerformanceEvaluationJMHPersistence: + @Benchmark + def analyze(program: Program): ModAnalysis[SchemeExp] = + program.analysis.analyze() + return program.analysis + + @Benchmark + def 
save(program: AnalyzedProgram): Unit = program.analyzedProgram.save(program.saveFile.toString()) + + @Benchmark + def load(program: SavedProgram): ModAnalysis[SchemeExp] = + program.loadAnalysis.load(program.saveFile.toString()) + return program.loadAnalysis + +object PerformanceEvaluationJMHPersistence: + // Use a static path to ensure that this file can be accessed throughout different JVM forks, this is necessary to allow it to be read by the profiler + final val testSizeFile = + Paths.get(System.getProperty("java.io.tmpdir").asInstanceOf[String] + "/testMAFPersistenceFileSize.json").asInstanceOf[Path] + @State(Scope.Benchmark) + class ProgramPath: + @Param( + Array( + "test/R5RS/ad/bst.scm", + "test/R5RS/ad/queue.scm", + "test/R5RS/ad/linear.scm", + "test/R5RS/various/infinite-2.scm", + "test/R5RS/various/mceval.scm", + "test/R5RS/icp/icp_3_leval.scm", + "test/R5RS/gambit/sboyer.scm", + "test/R5RS/gambit/scheme.scm", + ) + ) + var program: String = _ + @Param( + Array( + PersistenceEvaluation.simpleModFActual, + PersistenceEvaluation.simpleModFPositionComponents, + PersistenceEvaluation.simpleModFIDComponents, + PersistenceEvaluation.simpleModFIDExpressions, + PersistenceEvaluation.simpleModFIDs, + PersistenceEvaluation.simpleModFIDSchemeExpressions + ) + ) + var runAnalysis: String = _ + @Param(Array("0", "1", "2", "3", "5")) + var maxASTHeight: Int = _ + @Param(Array("true", "false")) + var cbor: Boolean = _ + + @Setup(Level.Trial) + def skipMaxASTHeight: Unit = + if maxASTHeight != 0 && !(runAnalysis == PersistenceEvaluation.simpleModFIDSchemeExpressions || runAnalysis == PersistenceEvaluation.simpleModFIDs) then + System.exit(0) + + def getAnalysis( + program: SchemeExp, + analysis: String, + maxADTHeight: Int = maxASTHeight + ): ModAnalysis[SchemeExp] = + runAnalysis match { + case PersistenceEvaluation.simpleModFActual => + if cbor then return new PersistenceEvaluation.SimpleModFActualCbor(program) + else return new PersistenceEvaluation.SimpleModFActual(program) + case PersistenceEvaluation.simpleModFPositionComponents => + return if cbor then new PersistenceEvaluation.SimpleModFPositionComponentsCbor(program) + else new PersistenceEvaluation.SimpleModFPositionComponents(program) + case PersistenceEvaluation.simpleModFIDComponents => + return if cbor then new PersistenceEvaluation.SimpleModFIDComponentsCbor(program) + else new PersistenceEvaluation.SimpleModFIDComponents(program) + case PersistenceEvaluation.simpleModFIDExpressions => + return if cbor then new PersistenceEvaluation.SimpleModFIDExpressionsCbor(program) + else new PersistenceEvaluation.SimpleModFIDExpressions(program) + case PersistenceEvaluation.simpleModFIDSchemeExpressions => + return if cbor then new PersistenceEvaluation.SimpleModFIDSchemeExpressionsCbor(program, maxADTHeight) + else new PersistenceEvaluation.SimpleModFIDSchemeExpressions(program, maxADTHeight) + case PersistenceEvaluation.simpleModFIDs => + return if cbor then new PersistenceEvaluation.SimpleModFIDsCbor(program, maxADTHeight) + else new PersistenceEvaluation.SimpleModFIDs(program, maxADTHeight) + } + + protected def getExpression(file: String): SchemeExp = return CSchemeParser.parseProgram(Reader.loadFile(s"../../$file")) + protected def newSaveFile: Path = + val saveFile = Files.createTempFile("maf", ".json") + if saveFile == null then return throw IOException("Could not create new temporary file.") + else return saveFile + + @State(Scope.Benchmark) + class Program extends ProgramPath: + var analysis: ModAnalysis[SchemeExp] = _ + + 
@Setup(Level.Invocation) + def loadProgram: Unit = + // Only run the analysis once for each program, since this does not change based on how it would be saved + if runAnalysis != PersistenceEvaluation.simpleModFActual || cbor then System.exit(0) + analysis = getAnalysis(getExpression(program), runAnalysis) + + @State(Scope.Benchmark) + class AnalyzedProgram extends ProgramPath: + var analyzedProgram: ModAnalysis[SchemeExp] = _ + var saveFile: Path = _ + + @Setup(Level.Invocation) + def createSaveFile: Unit = saveFile = newSaveFile + + @TearDown(Level.Invocation) + def removeSaveFile: Unit = Files.move(saveFile, testSizeFile, StandardCopyOption.REPLACE_EXISTING) + + @Setup(Level.Trial) + def analyzeProgram: Unit = + analyzedProgram = getAnalysis(getExpression(program), runAnalysis) + analyzedProgram.analyze() + + @State(Scope.Benchmark) + class SavedProgram extends ProgramPath: + var expression: SchemeExp = _ + var analyzedProgram: ModAnalysis[SchemeExp] = _ + var saveFile: Path = _ + var loadAnalysis: ModAnalysis[SchemeExp] = _ + + @TearDown(Level.Trial) + def removeSaveFile: Unit = Files.deleteIfExists(saveFile) + + @Setup(Level.Trial) + def loadProgram: Unit = + expression = getExpression(program) + loadAnalysis = getAnalysis(expression, runAnalysis) + analyzedProgram = getAnalysis(expression, runAnalysis) + analyzedProgram.analyze() + saveFile = newSaveFile + analyzedProgram.save(saveFile.toString()) + +/** + * This is a profile to be used by JMH, and can be used if you add `-prof maf.cli.experiments.performance.MaxMemoryProfiler` to the JMH command. This + * will add a profiler that looks at the file at `PerformanceEvaluationJMHPersistence.testSizeFile` and will report the generated file size in the JMH + * report. This is used in order to test how large a file is after a benchmark is run for the persistence benchmarks. + */ +class MaxMemoryProfiler extends ExternalProfiler: + override def addJVMInvokeOptions(params: BenchmarkParams): Collection[String] = return new ArrayList() + override def addJVMOptions(params: BenchmarkParams): Collection[String] = return new ArrayList() + override def allowPrintErr(): Boolean = true + override def allowPrintOut(): Boolean = true + override def beforeTrial(params: BenchmarkParams): Unit = + params.getForks() + return + override def getDescription(): String = return "Get the generated file size" + override def afterTrial(br: BenchmarkResult, pid: Long, stdOut: File, stdErr: File): Collection[_ <: Result[_]] = + val results: Collection[ScalarResult] = new ArrayList(); + if Files.exists(PerformanceEvaluationJMHPersistence.testSizeFile) then + results.add( + new ScalarResult("Saved file size", + Files.size(PerformanceEvaluationJMHPersistence.testSizeFile).toDouble / 1000, + "kB", + AggregationPolicy.MAX + ) + ); + Files.deleteIfExists(PerformanceEvaluationJMHPersistence.testSizeFile) + return results + Files.deleteIfExists(PerformanceEvaluationJMHPersistence.testSizeFile) + return results diff --git a/code/jvm/src/main/scala/maf/cli/runnables/Repl.scala b/code/jvm/src/main/scala/maf/cli/runnables/Repl.scala index e27c67c41..d261c441d 100644 --- a/code/jvm/src/main/scala/maf/cli/runnables/Repl.scala +++ b/code/jvm/src/main/scala/maf/cli/runnables/Repl.scala @@ -32,6 +32,8 @@ object Repl: | * -t TIMEOUT: run the analysis with the given timeout (in seconds). Defaults to 10. 
| * -dot set flag to enable outputting a FILENAME.dot file that contains a visualisation of results of the analysis, only works with "-f" | * -m load the given Racket module, use instead of "-f" + | * -s FILENAME: the name of the file where the analysis should be saved to when finished + | * -l FILENAME: the name of the file where the analysis should be loaded from """.stripMargin private val configurationsHelp: Map[String, String] = Map( @@ -79,7 +81,9 @@ object Repl: interactive: Boolean = false, performance: Boolean = false, dot: Boolean = false, - timeout: Long = 10): + timeout: Long = 10, + saveFile: Option[String] = None, + loadFile: Option[String] = None): def isEmpty: Boolean = remaining.isEmpty def continue(remaining: List[String]): ArgParser = this.copy(remaining = remaining) def setAnalysis(analysis: String): ArgParser = @@ -90,6 +94,14 @@ object Repl: ensureNotSet(this.filename, "filename") this.copy(filename = Some(filename)) + def setSaveFilename(filename: String): ArgParser = + ensureNotSet(this.saveFile, "saveFile") + this.copy(saveFile = Some(filename)) + + def setLoadFilename(filename: String): ArgParser = + ensureNotSet(this.loadFile, "loadFile") + this.copy(loadFile = Some(filename)) + def setParser(parser: String): ArgParser = ensureNotSet(this.parser, "parser") this.copy(parser = Some(parser)) @@ -132,6 +144,12 @@ object Repl: case "-dot" :: rest => parse(parser.copy(dot = true).continue(rest)) + case "-s" :: filename :: rest => + parse(parser.setSaveFilename(filename).continue(rest)) + + case "-l" :: filename :: rest => + parse(parser.setLoadFilename(filename).continue(rest)) + case arg => throw new Exception(s"invalid arguments $arg") @@ -171,6 +189,8 @@ object Repl: performance: Boolean, timeout: Long, dot: Boolean, + saveFile: Option[String] = None, + loadFile: Option[String] = None, someLoader: Option[String => SchemeExp] = None ): Unit = val loader: String => SchemeExp = someLoader.getOrElse(Reader.loadFile andThen parser.parse) @@ -178,12 +198,16 @@ object Repl: val exp = loader(filename) def runSingle(): Long = val anl = makeAnalysis(exp) - val (elapsed, _) = Timer.time { anl.analyzeWithTimeout(Timeout.start(timeout.seconds)) } + if loadFile.isDefined then + val (elapsed, _) = Timer.time { anl.load(loadFile.get) } //anl.analyzeWithTimeout(Timeout.start(timeout.seconds)) } + println(s"load time: ${elapsed / 1000000}") + val (elapsed, _) = Timer.time { anl.analyze() } //anl.analyzeWithTimeout(Timeout.start(timeout.seconds)) } // Do not print results if we are in perfomance testing mode if !performance then if !anl.finished then println("Analysis timed out") anl.printResult println(s"Analysis took ${elapsed / (1000 * 1000)} ms") + if saveFile.isDefined then anl.save(saveFile.get) // Print a dot graph if the dot option has been enabled if dot then anl.toDot(filename.replace("/", "_").nn + ".dot") elapsed @@ -221,7 +245,7 @@ object Repl: else runSingle() /** Runs a REPL that can be used to interactively test the abstract interpreter */ - private def runRepl(parser: P, makeAnalysis: A): Unit = + private def runRepl(parser: P, makeAnalysis: A, saveFile: Option[String]): Unit = def repl(): Unit = print(">") val program = readLine().trim().nn @@ -232,6 +256,7 @@ object Repl: anl.printResult println(s"Analysis took ${elapsed / (1000 * 1000)} ms") + if saveFile.isDefined then anl.save(saveFile.get) repl() repl() @@ -251,6 +276,8 @@ object Repl: assert(if options.dot then options.filename.isDefined else true, "-dot can only be combined with -f") // ensure that "-m" is not 
combined with "-f" assert(if options.module.isDefined then !options.filename.isDefined else true, "-m can not be combined with -f") + // ensure that "-i" is not combined with "-l" + assert(!(options.interactive && options.loadFile.isDefined), "-i can not be combined with -l") // setup the parser val parser = setupParser(options.parser) // setup the loader @@ -261,8 +288,8 @@ object Repl: val analysisFactory = setupAnalysis(options.analysis.get) // setup the loader of the file/module // either run the file or the repl - if options.interactive then runRepl(parser, analysisFactory) + if options.interactive then runRepl(parser, analysisFactory, options.saveFile) else // retrieve the file or module name val path = options.filename.getOrElse(options.module.get) - runFile(path, parser, analysisFactory, options.performance, options.timeout, options.dot, loader) + runFile(path, parser, analysisFactory, options.performance, options.timeout, options.dot, options.saveFile, options.loadFile, loader) diff --git a/code/shared/src/main/scala/maf/language/scheme/SchemeExp.scala b/code/shared/src/main/scala/maf/language/scheme/SchemeExp.scala index a43f886ee..34af2f2a9 100644 --- a/code/shared/src/main/scala/maf/language/scheme/SchemeExp.scala +++ b/code/shared/src/main/scala/maf/language/scheme/SchemeExp.scala @@ -379,7 +379,7 @@ sealed trait SchemeLettishExp extends SchemeExp: val body: List[SchemeExp] val idn: Identity override type T <: SchemeLettishExp - override val height: Int = 1 + bindings.foldLeft(0)((mx, b) => mx.max(b._2.height).max(body.foldLeft(0)((mx, e) => mx.max(e.height)))) + override val height: Int = 1 + bindings.foldLeft(0)((mx, b) => mx.max(b._2.height)).max(body.foldLeft(0)((mx, e) => mx.max(e.height))) def subexpressions: List[Expression] = bindings.foldLeft(List[Expression]())((a, b) => b._2 :: b._1 :: a) ::: body override def isomorphic(other: Expression): Boolean = super.isomorphic(other) && body.length == other.asInstanceOf[SchemeLettishExp].body.length override def eql(other: Expression): Boolean = super.eql(other) && body.length == other.asInstanceOf[SchemeLettishExp].body.length @@ -1048,6 +1048,7 @@ case class SchemeAssert(exp: SchemeExp, idn: Identity) extends SchemeExp: override def toString: String = s"(assert $exp)" def fv: Set[String] = exp.fv val label: Label = ASS + override val height: Int = 1 + exp.height override def deepDropIdentifier(id: Identifier): Option[SchemeExp] = exp.deepDropIdentifier(id) match diff --git a/code/shared/src/main/scala/maf/modular/ModAnalysis.scala b/code/shared/src/main/scala/maf/modular/ModAnalysis.scala index 7ba883c5a..4785ee2b4 100644 --- a/code/shared/src/main/scala/maf/modular/ModAnalysis.scala +++ b/code/shared/src/main/scala/maf/modular/ModAnalysis.scala @@ -18,6 +18,9 @@ case class Metric(name: String, result: Double) /** Super type of all analyses in MAF, provides basic entry points to the analysis */ trait AnalysisEntry[Exp <: Expression]: + /** The name of the analysis */ + val analysisName: String = "None" + /** Returns a boolean indicating whether the analysis has finished. Implementation should be provided by the work list algorithm. 
*/ def finished: Boolean @@ -40,6 +43,42 @@ trait AnalysisEntry[Exp <: Expression]: /** Method that defines how to print the result of the analysis */ def printResult: Unit = println(result) + /** + * This saves the current analysis to a file + * + * @param filename + * The file to save to + */ + def save(filename: String): Unit = System.err.nn.println("Save functionality is not implemented for this analysis") + + /** + * This saves the current analysis to a file, but only the elements that are in save + * + * @param filename + * The file to save to + * @param save + * The elements to save + */ + def save(filename: String, save: Set[String]): Unit = System.err.nn.println("Save functionality is not implemented for this analysis") + + /** + * Load an analysis from a given file + * + * @param filename + * The file to load the analysis from + */ + def load(filename: String): Unit = System.err.nn.println("Load functionality is not implemented for this analysis") + + /** + * Load the given elements of an analysis from a given file + * + * @param filename + * The file to load the analysis from + * @param load + * The elements to load + */ + def load(filename: String, load: Set[String]): Unit = System.err.nn.println("Load functionality is not implemented for this analysis") + /** * Method that renders a Dot graph of the components and the dependencies between them and writes it to a file * @@ -134,6 +173,8 @@ abstract class ModAnalysis[Expr <: Expression](val program: Expr) extends Clonea // flag to indicate if the analysis has already been initialized (see method `init`) private var initialized: Boolean = false + def analysisInitialized = initialized + def analysisInitialized_=(init: Boolean) = initialized = init /* Runs the analysis with a timeout. Implementation should be provided by the worklist algorithm. */ protected def run(timeout: Timeout.T): Unit @@ -167,5 +208,4 @@ abstract class ModAnalysis[Expr <: Expression](val program: Expr) extends Clonea // Print analysis information. 
 def configString(): String = "Modular analysis"
-
 }
diff --git a/code/shared/src/main/scala/maf/modular/scheme/modf/SchemeModFSemantics.scala b/code/shared/src/main/scala/maf/modular/scheme/modf/SchemeModFSemantics.scala
index 28e03fd08..3a9351f65 100644
--- a/code/shared/src/main/scala/maf/modular/scheme/modf/SchemeModFSemantics.scala
+++ b/code/shared/src/main/scala/maf/modular/scheme/modf/SchemeModFSemantics.scala
@@ -14,6 +14,9 @@ import maf.util._
 import maf.core.IdentityMonad.given
 import maf.core.Monad.MonadIterableOps
 import maf.core.Monad.MonadSyntaxOps
+import maf.save.SaveModF
+import maf.save.Savable
+import maf.save.LoadModF
 
 trait BaseEvalM[M[_]] extends Monad[M] with MonadError[M, Error] with MonadJoin[M]
@@ -71,7 +74,7 @@ trait BaseSchemeModFSemanticsM
         caller: Component
       ): M[ComponentContext] = Monad[M].unit(allocCtx(clo, args, call, caller))
 
-    lazy val mainBody = program
+    var mainBody = program
     def expr(cmp: Component): SchemeExp = body(cmp)
     def body(cmp: Component): SchemeExp = body(view(cmp))
     def body(cmp: SchemeModFComponent): SchemeExp = cmp match
diff --git a/code/shared/src/main/scala/maf/persistence/load/Analysis.scala b/code/shared/src/main/scala/maf/persistence/load/Analysis.scala
new file mode 100644
index 000000000..d30b40d77
--- /dev/null
+++ b/code/shared/src/main/scala/maf/persistence/load/Analysis.scala
@@ -0,0 +1,122 @@
+package maf.save
+
+import io.bullet.borer.Decoder
+import io.bullet.borer.Json
+import maf.core.Expression
+import java.nio.file.Paths
+import java.nio.file.Files
+import maf.language.scheme.SchemeExp
+import io.bullet.borer.Reader
+import scala.collection.mutable.HashMap
+import maf.modular.AnalysisEntry
+import maf.modular.ModAnalysis
+import scala.collection.mutable.ListBuffer
+import io.bullet.borer.Cbor
+
+/**
+ * Contains info about the top-level objects that need to be loaded.
+ *
+ * @param load
+ *   Function that should be called with the value after it has been decoded
+ * @param decoder
+ *   Decodes the value
+ * @tparam T
+ *   The type of the value that needs to be loaded
+ */
+case class Loadable[T](val load: (T) => Unit)(using val decoder: Decoder[T])
+
+/**
+ * The base trait for loading an analysis.
+ *
+ * Implementing this allows you to load your analysis; by default it will only load the name of your analysis, and you should mix in other traits like
+ * [[LoadComponents]] to also load components.
+ *
+ * @tparam Expr
+ *   The type of expression used in the analysis
+ */
+trait Load[Expr <: Expression] extends AnalysisEntry[Expr]:
+    /** Decode an analysis. */
+    given analysisDecoder: Decoder[Load[Expr]] with
+        override def read(reader: Reader): Load[Expr] =
+            val loadInfo = Load.this.loadInfo
+            loadSet = loadInfo.map(_._1).toSet
+            reader.read[Load[Expr]]()
+            loadSet = Set[String]()
+            return Load.this
+
+    protected var loadSet = Set[String]()
+    protected given excludedAnalysisDecoder: MapDecoder[Load[Expr]] with
+        override def read(reader: Reader): Load[Expr] =
+            reader.start()
+            for (key, value) <- loadInfo do
+                if loadSet.contains(key) then
+                    val result = reader.readMember(key)(using value.decoder)
+                    value.load(result)
+            reader.close()
+            return Load.this
+
+    def startLoad(): Unit = return
+
+    override def load(filename: String): Unit =
+        startLoad()
+        val bytes = Files.readAllBytes(Paths.get(filename))
+        if bytes != null then Json.decode(bytes).to[Load[Expr]](using analysisDecoder).value
+
+    override def load(filename: String, load: Set[String]): Unit =
+        startLoad()
+        val bytes = Files.readAllBytes(Paths.get(filename))
+        if bytes != null then Json.decode(bytes).to[Load[Expr]](using excludedAnalysisDecoder).value
+
+    /**
+     * Returns a list of string keys paired with [[Loadable]]s.
+     *
+     * This list defines all top-level objects that should be loaded in your analysis, and the key with which they should be loaded. If you want to
+     * load something else, you can override this method and add something to it.
+     *
+     * {{{
+     * override def loadInfo: List[(String, Loadable[_])] =
+     *     super.loadInfo ++ List("< key >" -> Loadable[< loadType >]((< loaded object >: < loadType >) => < put loaded object into analysis >))
+     * }}}
+     */
+    def loadInfo: List[(String, Loadable[_])] = List(("name", Loadable((name: String) => ())))
+
+trait LoadCbor[Expr <: Expression] extends Load[Expr]:
+    override def load(filename: String): Unit =
+        startLoad()
+        val bytes = Files.readAllBytes(Paths.get(filename))
+        if bytes != null then Cbor.decode(bytes).to[Load[Expr]](using analysisDecoder).value
+
+    override def load(filename: String, load: Set[String]): Unit =
+        startLoad()
+        val bytes = Files.readAllBytes(Paths.get(filename))
+        if bytes != null then Cbor.decode(bytes).to[Load[Expr]](using excludedAnalysisDecoder).value
+
+trait LoadInitialized[Expr <: Expression] extends ModAnalysis[Expr] with Load[Expr]:
+    override def loadInfo: List[(String, Loadable[?])] =
+        super.loadInfo ++ List(
+          ("initialized",
+           Loadable((initialized: Boolean) =>
+               if !visited.contains(initialComponent) then visited = visited + initialComponent
+               analysisInitialized = initialized
+           )
+          )
+        )
+
+/** The trait used to load the modF analysis.
*/ +trait LoadModF + extends Load[SchemeExp] + with LoadInitialized[SchemeExp] + with LoadSchemeExpressions + // with LoadExpressionIntID[SchemeExp] + with LoadActualExpressions[SchemeExp] + with LoadMainSchemeBody + with LoadComponents[SchemeExp] + // with LoadComponentIntID[SchemeExp] + with LoadActualComponents[SchemeExp] + with LoadStandardSchemeComponents + with LoadNoContext[SchemeExp] + with LoadSchemeAddr + with LoadDependency[SchemeExp] + with LoadAddrDependency[SchemeExp] + with LoadGlobalStore[SchemeExp] + with LoadModularSchemeDomain diff --git a/code/shared/src/main/scala/maf/persistence/load/Component.scala b/code/shared/src/main/scala/maf/persistence/load/Component.scala new file mode 100644 index 000000000..768bf30ae --- /dev/null +++ b/code/shared/src/main/scala/maf/persistence/load/Component.scala @@ -0,0 +1,344 @@ +package maf.save + +import maf.core.Address +import maf.core.Expression +import maf.core.Position.Position +import maf.language.scheme.SchemeExp +import maf.modular.AddrDependency +import maf.modular.AnalysisResults +import maf.modular.Dependency +import maf.modular.scheme.modf.SchemeModFComponent +import maf.modular.scheme.modf.StandardSchemeModFComponents +import io.bullet.borer.derivation.MapBasedCodecs +import maf.language.scheme.SchemeLambdaExp +import maf.core.Identifier +import maf.core.Identity +import maf.core.IdentityData +import maf.language.scheme.SchemeFuncall +import maf.language.scheme.SchemeLambda +import io.bullet.borer.derivation.ArrayBasedCodecs +import maf.language.scheme.SchemeVarArgLambda +import maf.language.scheme.SchemeIf +import maf.language.scheme.SchemeLet +import maf.language.scheme.SchemeVar +import maf.core.BasicEnvironment +import maf.core.Environment +import maf.core.WrappedEnv +import maf.core.NestedEnv +import maf.modular.scv.ScvContextSensitivity +import maf.modular.scheme.modf.NoContext +import io.bullet.borer.Decoder +import io.bullet.borer.Reader +import maf.core.Position +import maf.core.Position.PTag +import scala.collection.mutable.HashMap +import maf.language.scheme.SchemeValue +import maf.language.scheme.SchemeLetrec +import maf.language.scheme.SchemeAssert +import maf.language.scheme.SchemeSet +import maf.language.scheme.SchemeBegin +import maf.language.scheme.SchemeLetStar +import maf.core.worklist.WorkList +import maf.modular.worklist.SequentialWorklistAlgorithm +import maf.modular.worklist.FIFOWorklistAlgorithm +import scala.collection.immutable.Queue +import maf.core.worklist.FIFOWorkList +import maf.modular.ModAnalysis +import io.bullet.borer.derivation.CompactMapBasedCodecs +import java.{util => ju} + +/** + * The base trait for decoding components. + * + * @note + * This trait gives the methods needed to decode components, but does not implement them yet, other traits like [[LoadStandardSchemeComponents]] or + * [[LoadComponentIntID]] should be mixed in for the implementation. The trait that should be mixed in depends on the kind of components are used in + * your analysis. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait LoadComponents[Expr <: Expression] extends ModAnalysis[Expr] with Load[Expr]: + given componentDecoder: Decoder[Component] + +/** + * The base trait for decoding components. + * + * This trait is used to add [[actualComponentDecoder]], this given cannot be added into [[LoadComponents]] because this would cause an ambigious + * implicit with [[componentEncoder]]. 
+ * + * @note + * This trait gives the methods needed to decode components, but does not implement them yet, other traits like [[LoadStandardSchemeComponents]] or + * [[LoadComponentIntID]] should be mixed in for the implementation. The trait that should be mixed in depends on the kind of components are used in + * your analysis. + * @note + * This trait should not be used, rather, [[LoadExpressions]] should be extended. + * + * @tparam Expr + * The type of expression used in the analysis + */ +protected trait LoadActualComps[Expr <: Expression] extends LoadComponents[Expr]: + /** Encodes the actual component. */ + given actualComponentDecoder: Decoder[Component] + +/** + * Load components normally. + * + * Implementation of [[LoadComponents]] + */ +trait LoadActualComponents[Expr <: Expression] extends LoadActualComps[Expr]: + override given componentDecoder: Decoder[Component] with + override def read(reader: Reader): Component = + val component = reader.read[Component]()(using actualComponentDecoder) + if !visited.contains(component) then visited = visited + component + return component + +/** + * Load standard scheme components + * + * Implementation of [[LoadComponents]] + */ +trait LoadStandardSchemeComponents + extends LoadActualComps[SchemeExp] + with StandardSchemeModFComponents + with LoadContext[SchemeExp] + with LoadEnvironment[SchemeExp] + with LoadExpressions[SchemeExp]: + override given actualComponentDecoder: MapDecoder[Component] with + override def read(reader: Reader): Component = + if reader.tryReadString("main") then return initialComponent + else reader.read[SchemeModFComponent.Call[DecodeContext]]() + + given MapDecoder[SchemeModFComponent.Call[DecodeContext]] with + override def read(reader: Reader): SchemeModFComponent.Call[DecodeContext] = + reader.start() + val lambda = reader.readMember[SchemeExp]("lambda").asInstanceOf[SchemeLambdaExp] + val environment = reader.readMember[Environment[Address]]("environment") + val context = reader.readMember[DecodeContext]("context") + reader.close() + return new SchemeModFComponent.Call[DecodeContext]((lambda, environment), context) + +/** + * The base trait for decoding context. + * + * @note + * This trait gives the methods needed to decode context, but does not implement them yet, other traits like [[LoadNoContext]] should be mixed in + * for the implementation. The trait that should be mixed in depends on the kind of context that is used in your analysis. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait LoadContext[Expr <: Expression] extends Load[Expr]: + /** The type of context that should be decoded. */ + type DecodeContext + + /** + * Get the decoder that will be used to decode context. + * + * This decoder is used to decode objects where the key is important, when you want to e.g. decode a type from the key, some decoders might ignore + * this key, and should therefore not be used here. + * + * This will influence how context will be decoded, this can be e.g. a [[MapDecoder map-based]] decoder or an [[ArrayDecoder array-based]] + * decoder. + */ + given contextDecoder: Decoder[DecodeContext] + +/** + * Trait to decode the context for an analysis with no context. + * + * This will expect 'None' when reading context. 
+ * + * @tparam Expr + * The type of expression used in the analysis + */ +trait LoadNoContext[Expr <: Expression] extends LoadContext[Expr]: + override type DecodeContext = NoContext.type + override given contextDecoder: Decoder[DecodeContext] with + override def read(reader: Reader): DecodeContext = + if !reader.tryReadString("None") then return reader.unexpectedDataItem("None") + return NoContext + +/** + * Trait to decode positions. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait LoadPosition[Expr <: Expression] extends Load[Expr]: + given Decoder[Position] = CompactMapBasedCodecs.deriveAllDecoders[Position] + given Decoder[PTag] = CompactMapBasedCodecs.deriveAllDecoders[PTag] + + given Decoder[Identifier] = CompactMapBasedCodecs.deriveDecoder[Identifier] + given Decoder[Identity] = CompactMapBasedCodecs.deriveAllDecoders[Identity] + given Decoder[IdentityData] with + private object IdnData extends IdentityData { + // TODO: + def canEqual(that: Any): Boolean = ??? + def productArity: Int = ??? + def productElement(n: Int): Any = ??? + } + override def read(reader: Reader): IdentityData = + System.err.nn.println("IdentityData could not be decoded") + return IdnData + +/** + * Trait to decode environments. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait LoadEnvironment[Expr <: Expression] extends Load[Expr] with LoadAddr[Expr]: + given [T <: Address]: MapDecoder[Environment[T]] with + override def read(reader: Reader): Environment[T] = + reader.start() + val value = reader + .readMembers[Environment[T]]( + Map("basicEnvironment" -> summon[Decoder[BasicEnvironment[T]]], "nestedEnvironment" -> summon[Decoder[NestedEnv[T, T]]]) + ) + reader.close() + return value._2 + + given [T <: Address]: Decoder[BasicEnvironment[T]] with + override def read(reader: Reader): BasicEnvironment[T] = return new BasicEnvironment( + reader.read[Map[String, Address]]().asInstanceOf[Map[String, T]] + ) + + given [T <: Address, K <: Address]: MapDecoder[NestedEnv[T, K]] with + override def read(reader: Reader): NestedEnv[T, K] = + reader.start() + val content = reader.readMember[Map[String, Address]]("content") + val rst = reader.readOptionMember[Address]("rst") + reader.close() + return new NestedEnv(content.asInstanceOf[Map[String, T]], rst.asInstanceOf[Option[K]]) + +/** + * The base trait for decoding components only by their ID. + * + * @note + * This trait gives the methods needed to decode context, but does not implement them yet, other traits like [[LoadComponentIntID]] should be mixed + * in for the implementation. The trait that should be mixed in depends on the kind of context that is used in your analysis. + * + * @note + * Because this trait only decodes the component IDs, the entire component should have already been decoded and placed in [[components]], so the ID + * can be mapped to an actual component. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait LoadComponentID[Expr <: Expression] extends LoadActualComps[Expr] with LoadPosition[Expr]: + /* The type of ID that will be used to load components */ + type ID + + /** Map that connects component IDs to the component. */ + var components = HashMap[ID, Component]() + + override def startLoad(): Unit = + super.startLoad() + components = HashMap[ID, Component]() + + /** + * Register a loaded component, this allows you to also reference components using their ID using the [[components]] map. 
+ * + * @param component + * The component to register + */ + def addComponent(component: Component): Unit + override given componentDecoder: Decoder[Component] = componentIDDecoder + given componentIDDecoder: Decoder[Component] + protected given componentSetDecoder: Decoder[Set[Component]] + + override def loadInfo: List[(String, Loadable[_])] = + super.loadInfo ++ List(("components" -> Loadable((visited: Set[Component]) => + visited.foreach((component) => addComponent(component)) + this.visited = visited + ))) + +/** + * Trait that decodes standard scheme components using their position. + * + * Implementation of [[LoadComponentID]]. + * + * @note + * Because this trait only decodes the component positions, the entire component should have already been decoded and placed in [[components]], so + * the position can be mapped to an actual component. + */ +trait LoadStandardSchemeComponentPosition extends LoadComponentID[SchemeExp] with LoadContext[SchemeExp]: + override type ID = Position + override def addComponent(component: Component): Unit = + if component != initialComponent then + components.addOne(component.asInstanceOf[SchemeModFComponent.Call[DecodeContext]].clo._1.idn.pos, component) + + override protected given componentSetDecoder: ArrayDecoder[Set[Component]] with + override def read(reader: Reader): Set[Component] = + reader.start() + val components = + reader.readUntilBeforeBreak( + Set[Component](), + (components: Set[Component]) => components + reader.readMember[Component]()(using actualComponentDecoder)._2 + ) + reader.close() + return components + + override given componentIDDecoder: Decoder[Component] with + override def read(reader: Reader): Component = + if reader.tryReadString("main") then return initialComponent + else reader.read[SchemeModFComponent.Call[DecodeContext]]().asInstanceOf[Component] + + given schemeComponentIDDecoder[T]: Decoder[SchemeModFComponent.Call[DecodeContext]] with + override def read(reader: Reader): SchemeModFComponent.Call[DecodeContext] = + val pos = reader.read[Position]() + return components(pos).asInstanceOf[SchemeModFComponent.Call[DecodeContext]] + +/** + * Trait that decodes standard scheme components using an integer ID. + * + * Implementation of [[LoadComponentID]]. + * + * @note + * Because this trait only decodes the component ID, the entire component should have already been decoded and placed in [[components]], so the ID + * can be mapped to an actual component. 
+ */ +trait LoadComponentIntID[Expr <: Expression] extends LoadComponentID[Expr]: + override type ID = Int + override def addComponent(component: Component): Unit = return + + override given componentIDDecoder: Decoder[Component] with + override def read(reader: Reader): Component = + if reader.hasInt then + val id = reader.readInt() + return components(id) + else return reader.read[Component]()(using actualComponentDecoder) + + override protected given componentSetDecoder: MapDecoder[Set[Component]] with + override def read(reader: Reader): Set[Component] = + reader.start() + val components = reader.readUntilBeforeBreak( + Set[Component](), + (components: Set[Component]) => + val component = reader.readMember[Component]()(using componentDecoder) + val key = component._1.toInt + LoadComponentIntID.this.components.addOne((key, component._2)) + components + (component._2) + ) + reader.close() + return components + +trait LoadWorklist[Expr <: Expression] extends ModAnalysis[Expr] with Load[Expr]: + given worklistDecoder: Decoder[WorkList[Component]] + def setWorklist(worklist: WorkList[Component]): Unit + def newWorklist(components: List[Component]): WorkList[Component] + override def loadInfo: List[(String, Loadable[_])] = + super.loadInfo ++ List(("worklist" -> Loadable((worklist: WorkList[Component]) => setWorklist(worklist)))) + +trait LoadSequentialWorklist[Expr <: Expression] extends SequentialWorklistAlgorithm[Expr] with LoadWorklist[Expr] with LoadComponents[Expr]: + override def setWorklist(worklist: WorkList[Component]): Unit = workList = worklist + given worklistDecoder: ArrayDecoder[WorkList[Component]] with + override def read(reader: Reader): WorkList[Component] = + reader.start() + val worklistComponents = reader.readUntilBeforeBreak(List[Component](), (lst: List[Component]) => lst ++ List(reader.read[Component]())) + reader.close() + return newWorklist(worklistComponents) + +trait LoadFIFOWorklist[Expr <: Expression] extends LoadSequentialWorklist[Expr] with FIFOWorklistAlgorithm[Expr]: + override def newWorklist(components: List[Component]): WorkList[Component] = + return FIFOWorkList[Component](components.iterator.to(Iterable)) diff --git a/code/shared/src/main/scala/maf/persistence/load/Decoder.scala b/code/shared/src/main/scala/maf/persistence/load/Decoder.scala new file mode 100644 index 000000000..ba28c7e6a --- /dev/null +++ b/code/shared/src/main/scala/maf/persistence/load/Decoder.scala @@ -0,0 +1,424 @@ +package maf.save + +import io.bullet.borer.derivation.MapBasedCodecs +import io.bullet.borer.derivation.ArrayBasedCodecs +import io.bullet.borer.Decoder +import io.bullet.borer.Reader +import scala.collection.mutable.HashMap +import io.bullet.borer.derivation.CompactMapBasedCodecs +import scala.concurrent.Future +import scala.concurrent.Promise +import scala.util.{Failure, Success} +import scala.concurrent.ExecutionContext.Implicits.global +import scala.reflect.ClassTag + +/** + * Base trait for an decoder. + * + * This trait has methods for reading keys and values with a decoder from a given reader and depending on the implementation off this trait, this can + * either read your values from a map or an array. This allows you to decode your values in a certain way, but without knowing exactly how it was + * stored (e.g. in a map or in an array). + * + * This trait also has method for opening and closing an encapsulation, this will open/close either a map or an array based on the implementation of + * this trait. 
+ * + * @note + * This trait gives the methods needed to read values, but not the implementation. Other traits like [[MapDecoder]] or [[ArrayDecoder]] should be + * used depending on how you want your value to be decoded. + * + * @note + * Some decoders like [[ArrayDecoder]] will also not decode your key, because arrays do not require keys like maps do, if the key does need to be + * stored, you should use an decoder that writes them down like [[MapDecoder]] or [[ArrayKeyDecoder]] if you want to use an array. + */ +trait AbstractDecoder[T] extends Decoder[T]: + /** + * Read a key from the given reader. + * + * @note + * This only reads a key and should therefore be followed up by reading a value using [[readValue]]. + * + * @note + * In some decoders like [[ArrayDecoder]] this will not read anything as this decoder does not use keys. + * + * @param reader + * The reader used read the key + * @return + * The key that was read. + */ + def readKey(reader: Reader): String + + /** + * Read a given key from the given reader. + * + * This will read a given key, if this key is not yet readable because it only appears later in the file, the reading of this key will be delayed + * until it becomes available. + * + * @note + * This only reads a key and should therefore be followed up by reading a value using [[readValue]]. + * + * @note + * In some decoders like [[ArrayDecoder]] this will not read anything as this decoder does not use keys. + * + * @param reader + * The reader used read the key + * @param key + * The key that should be read + * @return + * The key that was read. + */ + def readKey(reader: Reader, key: String): String + + /** + * Read a given value from the given reader. + * + * This will read a value of type T using the previously read key, if this key is not yet available because it appears later in the file, this + * will be delayed until it becomes available. In this case the returned object will not have a value yet, which will be filled in when the key + * becomes available. + * + * @note + * This only reads a value and should therefore only be called after reading a key using [[readKey]]. + * + * @note + * In some decoders like [[ArrayDecoder]] the value will always be read immediately because there are no keys. + * + * @param reader + * The reader used read the value + * @tparam T + * The type of value that should be decoded + * @returns + * The key-value pair that is read, if the key cannot be read yet because it appears later in the file, the value will be empty and will be + * filled in later. + * + * @throws IllegalStateException + * When you call this before calling [[readKey]] + */ + def readValue[T: Decoder](reader: Reader): T = reader.read[T]() + + /** + * Opens either a new map or a new array, based on the decoder that is used. + * + * @note + * This only opens a new encapsulation and should therefore only be called after reading a key using [[readKey]]. + * + * @param reader + * The reader used read the array/map + */ + def openEncapsulation(reader: Reader): Unit + + /** + * Opens either a new map or a new array, based on the decoder that is used. + * + * @note + * This only opens a new encapsulation and should therefore only be called after reading a key using [[readKey]]. 
+ * + * @param reader + * The reader used read the array/map + * @param amount + * The amount of elements the map/array, this is only useful when using CBOR the length of maps and arrays is used in the encoding + */ + def openEncapsulation(reader: Reader, amount: Int): Unit + + /** + * Closes the map/array, based on the decoder that is used. + * + * @note + * This only closes the encapsulation and should therefore only be called after having already opened an encapsulation using + * [[openEncapsulation]]. + * + * @param reader + * The reader used read the array/map + * @param unbounded + * Wether the array/map had a fixed length or not + * @param res + * The object that was encoded inside of the array/map + * @tparam T + * The type of the object that was encoded inside of the array/map + */ + def closeEncapsulation[T](reader: Reader): Unit + + /** + * Read the first element that you can, discarding any other elements. + * + * @param reader + * The reader used read + */ + def forceReadValue(reader: Reader): Unit + + /** [TODO: description] */ + def decodeOption[T: Decoder](reader: Reader, key: String): Option[T] + + extension (reader: Reader) + /** + * Read a key-value pair. + * + * This will read a key-value pair in either a map or an array, based on the decoder that is given. If the key cannot be read yet because it + * appears later in the file, the resulting value will not have a value yet, this value will be filled in when the key becomes readable. + * + * @param key + * The key to read + * @param decoder + * Implicit argument that decides how to read the key-value pair + * @tparam T + * The type of the value that should be read, this type should have an decoder + * @returns + * The key-value pair that is read, if the key cannot be read yet because it appears later in the file, the value will be empty and will be + * filled in later. + */ + def readMember[T: Decoder](key: String): T = + readKey(reader, key) + readValue[T](reader) + + /** + * Read a key-value pair. + * + * This will read a key-value pair in either a map or an array, based on the decoder that is given. The key will be the first available key or + * a autogenerated key if you are using a decoder that doesn't use keys like [[ArrayDecoder]] + * + * @param key + * The key to read + * @param decoder + * Implicit argument that decides how to read the key-value pair + * @tparam T + * The type of the value that should be read, this type should have an decoder + * @returns + * The key-value pair that is read. + */ + def readMember[T: Decoder](): (String, T) = + val key = readKey(reader) + (key, readValue[T](reader)) + + def readOptionMember[T](key: String)(using Decoder[T]): Option[T] = + return decodeOption[T](reader, key) + + /** + * Read one of the given key-value pairs. + * + * This will read the first key that is found and return the value associated with it, if non of the keys can currently be read, the returned + * value will not have a key or a value. These will be filled in when a key matching one of the given keys is encountered. + * + * @note + * When using a decoder that doesn't use keys like [[ArrayDecoder]] the first decoder given will be used. 
+ * + * @param keys + * The possible keys that could be read + * @param decoder + * Implicit argument that decides how to read the key-value pair + * @tparam T + * The type of the value that should be read, this type should have an decoder + * @returns + * The key-value pair that is read, if none of the keys can be read because it appears later in the file, the key and the value will be + * empty and will be filled in later. + */ + def readMembers[T](keys: Map[String, Decoder[_ <: T]]): (String, T) = + var key = readKey(reader) + if !keys.contains(key) then reader.unexpectedDataItem("", key) + return (key, reader.read[T]()(using keys.get(key).get.asInstanceOf[Decoder[T]])) + + /** + * Read until a map/array end is found and accumulate the result without reading the actual map/array end. + * + * @note + * The given function should also read the values, this method will just continuously call the given function with the returned value until + * a break is reached. + * + * @param zero + * The initial alue + * @param f + * The function transforming the previous value into the next value, this should include reading the value + * @return + * The final value + */ + def readUntilBeforeBreak[T](zero: T, f: T => T): T = + var res = zero + while !reader.hasBreak do res = f(res) + return res + + /** [TODO: description] */ + def start(): Unit = + openEncapsulation(reader) + + def start(amount: Int): Unit = openEncapsulation(reader, amount) + + def open(): Unit = + readKey(reader); + openEncapsulation(reader); + + def open(amount: Int): Unit = + readKey(reader); + openEncapsulation(reader, amount); + + /** [TODO: description] */ + def close(): Unit = + readUntilBeforeBreak(None, + (none: None.type) => + forceReadValue(reader) + None + ) + closeEncapsulation(reader) + +/** + * Decoder that uses maps to decode values. + * + * This decoder uses maps to decode values and therefore requires keys to be present. + * + * @example + * {{{ + * given MapDecoder[T] + * override protected def read(reader: Reader): T = + * reader.start() + * < decode value > + * reader.close() + * }}} + * + * @tparam T + * The type to encode + */ +trait MapDecoder[T] extends AbstractDecoder[T]: + override def readKey(reader: Reader): String = return reader.readString() + override def readKey(reader: Reader, key: String): String = + val read = reader.readString() + if !read.equals(key) then reader.unexpectedDataItem(key, read) + return read + override def openEncapsulation(reader: Reader): Unit = reader.readMapStart() + override def openEncapsulation(reader: Reader, amount: Int): Unit = reader.readMapOpen(amount) + override def closeEncapsulation[T](reader: Reader): Unit = reader.readBreak() + override def forceReadValue(reader: Reader): Unit = + reader.skipElement() + reader.skipElement() + override def decodeOption[T: Decoder](reader: Reader, key: String): Option[T] = + if reader.tryReadString(key) then return Some(reader.read[T]()) + else return None + +/** + * Decoder that uses arrays to decode values. + * + * This decoder uses arrays to decode values and does not require keys, if you want an array-based decoder that saves keys, you should use + * [[ArrayKeyDecoder]]. + * + * @example + * {{{ + * given ArrayDecoder[T] + * override protected def read(reader: Reader): T = + * reader.start() + * < decode value > + * reader.close() + * }}} + * + * @tparam T + * The type to encode + */ +trait ArrayDecoder[T] extends AbstractDecoder[T]: + /** Used to generate IDs if no key is provided, this is used to store the values. 
*/ + protected var id = -1 + override def readKey(reader: Reader): String = return "" + override def readKey(reader: Reader, key: String): String = return key + override def openEncapsulation(reader: Reader): Unit = reader.readArrayStart() + override def openEncapsulation(reader: Reader, amount: Int): Unit = reader.readArrayOpen(amount) + override def closeEncapsulation[T](reader: Reader): Unit = reader.readBreak() + override def decodeOption[T: Decoder](reader: Reader, key: String): Option[T] = + var res: Option[T] = None + reader.readArrayOpen(1) + if !reader.hasBreak then res = Some(reader.read[T]()) + reader.readBreak() + return res + override def forceReadValue(reader: Reader): Unit = + reader.skipElement() + +/** + * Decoder that uses arrays to decode values, but preserves keys. + * + * This decoder uses arrays to decode values but requires values to be directly preceded by a key. This can be used, for example, if your keys are not + * strings. Since non-string keys are not supported in JSON, you cannot use a map for this, but this class does allow you to load your key-value pair + * in an intuitive way. + * + * This is how your key-value pair should be saved: + * {{{ + * [ + * < key1 >, + * < value1 >, + * < key2 >, + * < value2> , + * ... + * ] + * }}} + * + * @example + * {{{ + * given ArrayKeyDecoder[T] + * override protected def read(reader: Reader): T = + * reader.start() + * < decode value > + * reader.close() + * }}} + * + * @tparam T + * The type to encode + */ +trait ArrayKeyDecoder[T] extends MapDecoder[T]: + /** The key that was read, and which value should now be read. */ + protected var key: Option[Any] = None + + /** + * Read a key from the given reader. + * + * @note + * This only reads a key and should therefore be followed up by reading a value using [[readValue]]. + * + * @param reader + * The reader used read the key + * @tparam T + * The type of the key that should be read + * @return + * The key that was read. + */ + def readKey[T: Decoder](reader: Reader): Any = + key = Some(reader.read[T]()) + key.get + + /** + * Read a given value from the given reader. + * + * This will read a value of type T using the previously read key. + * + * @note + * This only reads a value and should therefore only be called after reading a key using [[readKey]]. + * + * @param reader + * The reader used read the value + * @tparam V + * The type of key that was decoded by [[readKey]] + * @tparam T + * The type of value that should be decoded + * @returns + * The key-value pair that is read, if the key cannot be read yet because it appears later in the file, the value will be empty and will be + * filled in later. + * + * @throws IllegalStateException + * When you call this before calling [[readKey]] + */ + def readKeyValue[V, T: Decoder](reader: Reader): (V, T) = + if key.isEmpty then throw new IllegalStateException(s"Trying to read a value before reading a key.") + val res = reader.read[T]() + val tmpKey = key + key = None + return (tmpKey.get.asInstanceOf[V], res) + override def openEncapsulation(reader: Reader): Unit = reader.readArrayStart() + override def openEncapsulation(reader: Reader, amount: Int): Unit = reader.readArrayOpen(amount) + override def closeEncapsulation[T](reader: Reader): Unit = reader.readBreak() + + extension (reader: Reader) + /** + * Reads a key-value pair. + * + * This will read a key-value pair in either a map or an array, based on the decoder that is given. 
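+         *
+         *   A sketch with `Int` keys and `String` values (both chosen purely for illustration):
+         *   {{{
+         *   reader.start()
+         *   // Reads one < key >, < value > pair from the surrounding array.
+         *   val (id, name) = reader.readMember[Int, String]()
+         *   reader.close()
+         *   }}}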
+ * + * @param decoder + * Implicit argument that decides how to read the key-value pair + * @tparam T + * The type of the key that should be read, this type should have an decoder + * @tparam U + * The type of the value that should be read, this type should have an decoder + */ + def readMember[K: Decoder, V: Decoder](): (K, V) = + readKey[K](reader) + readKeyValue[K, V](reader) diff --git a/code/shared/src/main/scala/maf/persistence/load/Dependency.scala b/code/shared/src/main/scala/maf/persistence/load/Dependency.scala new file mode 100644 index 000000000..09e6e77d7 --- /dev/null +++ b/code/shared/src/main/scala/maf/persistence/load/Dependency.scala @@ -0,0 +1,124 @@ +package maf.save + +import maf.language.scheme.SchemeExp +import maf.modular.Dependency +import maf.modular.AddrDependency +import maf.core.Expression +import maf.core.Address +import maf.modular.scheme.VarAddr +import maf.modular.ReturnAddr +import maf.modular.scheme.PrmAddr +import maf.modular.scheme.PtrAddr +import maf.language.scheme.SchemeValue +import io.bullet.borer.Reader +import io.bullet.borer.Decoder +import maf.core.Identifier +import maf.core.Identity +import scala.collection.mutable.HashMap + +/** + * Trait to decode address dependencies. + * + * Implementation of [[LoadDependency]]. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait LoadAddrDependency[Expr <: Expression] extends LoadDependency[Expr] with LoadPosition[Expr] with LoadAddr[Expr]: + given Decoder[AddrDependency] with + override def read(reader: Reader): AddrDependency = + val addr = reader.read[Address]() + return new AddrDependency(addr) + override def dependencyDecoders = super.dependencyDecoders ++ Set(("addrDependency", summon[Decoder[AddrDependency]])) + +/** + * The base trait for decoding dependencies. + * + * @note + * This trait gives the methods needed to decode dependencies, but does not implement them yet, other traits like [[LoadAddrDependency]] should be + * mixed in for the implementation. The trait that should be mixed in depends on the kind of dependencies that is used in your analysis. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait LoadDependency[Expr <: Expression] extends LoadMapToArray with LoadComponents[Expr]: + override def loadInfo: List[(String, Loadable[_])] = + super.loadInfo ++ List(("dependencies", Loadable((deps: Map[Dependency, Set[Component]]) => this.deps = deps))) + + /** Returns a map that links a key to a specific decoder. */ + def dependencyDecoders = Set[(String, Decoder[_ <: Dependency])]() + + given MapDecoder[Dependency] with + override def read(reader: Reader): Dependency = + reader.start() + val dependency = reader.readMembers(dependencyDecoders.toMap) + reader.close() + return dependency._2 + +/** + * The base trait for decoding addresses. + * + * @note + * This trait gives the methods needed to decode addresses, but does not implement them yet, other traits like [[LoadAddrDependency]] should be + * mixed in for the implementation. The trait that should be mixed in depends on the kind of addresses that is used in your analysis. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait LoadAddr[Expr <: Expression] extends Load[Expr] with LoadPosition[Expr]: + /** Returns a map that links a key to a specific decoder. 
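+     *
+     *   Mixins extend this set, for example (a sketch with a hypothetical address type `MyAddr` that has a decoder in scope):
+     *   {{{
+     *   override def addressDecoders = super.addressDecoders ++ Set(("myAddr", summon[Decoder[MyAddr]]))
+     *   }}}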
*/ + def addressDecoders = Set[(String, Decoder[_ <: Address])]() + + given MapDecoder[Address] with + override def read(reader: Reader): Address = + reader.start() + val address = reader.readMembers(addressDecoders.toMap)._2 + reader.close() + return address + +/** + * Trait to decode scheme addresses. + * + * This is an implementation of [[LoadAddr]]. + */ +trait LoadSchemeAddr extends LoadAddr[SchemeExp] with LoadContext[SchemeExp] with LoadComponents[SchemeExp] with LoadExpressions[SchemeExp]: + override def addressDecoders = + super.addressDecoders ++ Set( + ("varAddr", summon[Decoder[VarAddr[DecodeContext]]]), + ("prmAddr", summon[Decoder[PrmAddr]]), + ("returnAddr", summon[Decoder[ReturnAddr[Component]]]), + ("ptrAddr", summon[Decoder[PtrAddr[DecodeContext]]]) + ) + + given MapDecoder[ReturnAddr[Component]] with + override def read(reader: Reader): ReturnAddr[Component] = + reader.start() + val identity = reader.readMember[Identity]("identity") + val component = reader.readMember[Component]("component") + reader.close() + return new ReturnAddr[Component](component, identity) + + given MapDecoder[VarAddr[DecodeContext]] with + override def read(reader: Reader): VarAddr[DecodeContext] = + reader.start() + val name = reader.readMember[Identifier]("id") + val context = reader.readOptionMember[DecodeContext]("context") + reader.close() + return new VarAddr[DecodeContext]( + name, + context.asInstanceOf[DecodeContext] + ) + + given Decoder[PrmAddr] with + override def read(reader: Reader): PrmAddr = new PrmAddr(reader.read[String]()) + + given MapDecoder[PtrAddr[DecodeContext]] with + override def read(reader: Reader): PtrAddr[DecodeContext] = + reader.start() + val expression = reader.readMember[SchemeExp]("expression") + val context = reader.readOptionMember[DecodeContext]("context") + reader.close() + return new PtrAddr[DecodeContext]( + expression, + context.asInstanceOf[DecodeContext] + ) diff --git a/code/shared/src/main/scala/maf/persistence/load/Expression.scala b/code/shared/src/main/scala/maf/persistence/load/Expression.scala new file mode 100644 index 000000000..3609abcbc --- /dev/null +++ b/code/shared/src/main/scala/maf/persistence/load/Expression.scala @@ -0,0 +1,139 @@ +package maf.save + +import maf.core.Expression +import io.bullet.borer.Decoder +import maf.language.scheme.SchemeExp +import maf.language.scheme.SchemeValue +import maf.language.scheme.SchemeFuncall +import maf.language.scheme.SchemeVar +import maf.language.scheme.SchemeLambda +import maf.language.scheme.SchemeVarArgLambda +import maf.language.scheme.SchemeLambdaExp +import maf.language.scheme.SchemeLetrec +import maf.language.scheme.SchemeLet +import maf.language.scheme.SchemeIf +import maf.language.scheme.SchemeSet +import maf.language.scheme.SchemeBegin +import maf.language.scheme.SchemeLetStar +import maf.language.scheme.SchemeAssert +import io.bullet.borer.Reader +import scala.collection.mutable.HashMap +import io.bullet.borer.derivation.CompactMapBasedCodecs +import maf.modular.scheme.modf.BaseSchemeModFSemanticsM +import maf.util.Writer.write + +/** + * The base trait for decoding expressions. + * + * @note + * This trait gives the methods needed to decode expressions, but does not implement them yet, other traits like [[LoadSchemeExpressions]] or + * [[LoadExpressionIntID]] should be mixed in for the implementation. The trait that should be mixed in depends on the kind of expressions are used + * in your analysis. 
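+ *
+ *   As a sketch (not a prescribed configuration), a Scheme analysis that stores expressions in full might combine:
+ *   {{{
+ *   trait MyExpressionLoading extends Load[SchemeExp] with LoadSchemeExpressions with LoadActualExpressions[SchemeExp]
+ *   }}}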
+ * + * @tparam Expr + * The type of expression used in the analysis + */ +trait LoadExpressions[Expr <: Expression] extends Load[Expr]: + given expressionDecoder: Decoder[Expr] + +/** + * The base trait for decoding expressions. + * + * This trait is used to add [[actualExpressionDecoder]], this given cannot be added into [[LoadExpressions]] because this would cause an ambigious + * implicit with [[expressionEncoder]]. + * + * @note + * This trait gives the methods needed to decode expressions, but does not implement them yet, other traits like [[LoadSchemeExpressions]] or + * [[LoadExpressionIntID]] should be mixed in for the implementation. The trait that should be mixed in depends on the kind of expressions are used + * in your analysis. + * @note + * This trait should not be used, rather, [[LoadExpressions]] should be extended. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait LoadActualExprs[Expr <: Expression] extends LoadExpressions[Expr]: + /** Decodes the actual expression, and doesn't decode it using IDs. */ + protected given actualExpressionDecoder: Decoder[Expr] + +trait LoadActualExpressions[Expr <: Expression] extends LoadActualExprs[Expr]: + override given expressionDecoder: Decoder[Expr] = actualExpressionDecoder + +trait LoadExpressionID[Expr <: Expression] extends Load[Expr] with LoadActualExprs[Expr]: + override given expressionDecoder: Decoder[Expr] = expressionIDDecoder + override def loadInfo: List[(String, Loadable[?])] = + super.loadInfo ++ List(("expressions", Loadable((expressions: Set[Expr]) => ()))) + + /** Decodes a set of expressions, this is used to e.g. get ID info to the expressions. */ + protected given expressionSetDecoder: Decoder[Set[Expr]] + + /** Decodes an expression using an ID */ + protected given expressionIDDecoder: Decoder[Expr] + +trait LoadMainSchemeBody extends BaseSchemeModFSemanticsM with LoadExpressions[SchemeExp]: + override def loadInfo: List[(String, Loadable[?])] = + super.loadInfo ++ List(("mainBody", Loadable((exp: SchemeExp) => mainBody = exp))) + +trait LoadExpressionIntID[Expr <: Expression] extends LoadExpressionID[Expr]: + private var expressions: HashMap[Int, Expr] = HashMap[Int, Expr]() + + override def startLoad(): Unit = + super.startLoad() + expressions = HashMap[Int, Expr]() + + override protected given expressionSetDecoder: MapDecoder[Set[Expr]] with + override def read(reader: Reader): Set[Expr] = + reader.start() + val expressions = reader.readUntilBeforeBreak( + Set[Expr](), + (expressions: Set[Expr]) => + val expression = reader.readMember[Expr]()(using actualExpressionDecoder) + val key = expression._1.toInt + LoadExpressionIntID.this.expressions.addOne((key, expression._2)) + expressions + (expression._2) + ) + reader.close() + return expressions + override protected given expressionIDDecoder: Decoder[Expr] with + override def read(reader: Reader): Expr = + if reader.hasInt then return expressions(reader.readInt()) + else reader.read[Expr]()(using actualExpressionDecoder) + +trait LoadSchemeSubExpressions extends LoadExpressions[SchemeExp] with LoadPosition[SchemeExp]: + given Decoder[SchemeValue] = CompactMapBasedCodecs.deriveDecoder + given Decoder[maf.language.sexp.Value] = CompactMapBasedCodecs.deriveAllDecoders + given Decoder[SchemeFuncall] = CompactMapBasedCodecs.deriveDecoder[SchemeFuncall] + given Decoder[SchemeVar] = CompactMapBasedCodecs.deriveDecoder[SchemeVar] + given Decoder[SchemeLambda] = CompactMapBasedCodecs.deriveDecoder[SchemeLambda] + given 
Decoder[SchemeVarArgLambda] = CompactMapBasedCodecs.deriveDecoder[SchemeVarArgLambda] + given Decoder[SchemeLambdaExp] = CompactMapBasedCodecs.deriveDecoder[SchemeLambdaExp] + given Decoder[SchemeLetrec] = CompactMapBasedCodecs.deriveDecoder[SchemeLetrec] + given Decoder[SchemeAssert] = CompactMapBasedCodecs.deriveDecoder[SchemeAssert] + given Decoder[SchemeLet] = CompactMapBasedCodecs.deriveDecoder[SchemeLet] + given Decoder[SchemeIf] = CompactMapBasedCodecs.deriveDecoder[SchemeIf] + given Decoder[SchemeSet] = CompactMapBasedCodecs.deriveDecoder[SchemeSet] + given Decoder[SchemeBegin] = CompactMapBasedCodecs.deriveDecoder[SchemeBegin] + given Decoder[SchemeLetStar] = CompactMapBasedCodecs.deriveDecoder[SchemeLetStar] + +trait LoadSchemeExpressions extends LoadActualExprs[SchemeExp] with LoadSchemeSubExpressions: + override protected given actualExpressionDecoder: MapDecoder[SchemeExp] with + override def read(reader: Reader): SchemeExp = + reader.start() + val map: Map[String, Decoder[_ <: SchemeExp]] = + Map( + "funcall" -> summon[Decoder[SchemeFuncall]], + "var" -> summon[Decoder[SchemeVar]], + "lambda" -> summon[Decoder[SchemeLambda]], + "argLambda" -> summon[Decoder[SchemeVarArgLambda]], + "value" -> summon[Decoder[SchemeValue]], + "letrec" -> summon[Decoder[SchemeLetrec]], + "assert" -> summon[Decoder[SchemeAssert]], + "let" -> summon[Decoder[SchemeLet]], + "schemeIf" -> summon[Decoder[SchemeIf]], + "set" -> summon[Decoder[SchemeSet]], + "begin" -> summon[Decoder[SchemeBegin]], + "letStar" -> summon[Decoder[SchemeLetStar]], + ) + val expression = reader.readMembers[SchemeExp](map) + reader.close() + return expression._2 diff --git a/code/shared/src/main/scala/maf/persistence/load/Store.scala b/code/shared/src/main/scala/maf/persistence/load/Store.scala new file mode 100644 index 000000000..69fcd87a8 --- /dev/null +++ b/code/shared/src/main/scala/maf/persistence/load/Store.scala @@ -0,0 +1,253 @@ +package maf.save + +import maf.core.Expression +import maf.modular.AbstractDomain +import maf.modular.GlobalStore +import io.bullet.borer.Decoder +import maf.modular.scheme.ModularSchemeDomain +import maf.language.scheme.SchemeExp +import maf.lattice.HMapKey +import maf.lattice.HMap +import io.bullet.borer.Reader +import maf.language.scheme.lattices.ModularSchemeLattice +import maf.lattice.Concrete +import maf.lattice.ConstantPropagation +import maf.language.scheme.lattices.SchemeLattice +import maf.language.scheme.SchemeLambdaExp +import maf.core.Address +import maf.modular.scheme.modf.BaseSchemeModFSemanticsM +import maf.modular.scheme.SchemeConstantPropagationDomain +import maf.lattice.ConstantPropagation.L + +/** + * The base trait for decoding [[AbstractDomain.Value values]]. + * + * @note + * This trait gives the methods needed to decode values, but does not implement them yet, other traits like [[LoadModularDomain]] should be mixed in + * for the implementation. The trait that should be mixed in depends on the kind of values that is used in your analysis. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait LoadValue[Expr <: Expression] extends Load[Expr] with AbstractDomain[Expr]: + given valueDecoder: Decoder[Value] + +/* Trait to decode lattices. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait LoadLattice[Expr <: Expression] extends Load[Expr]: + /** + * The types of lattices that can be decoded by this trait. 
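+     *
+     *   As a sketch of what such elements look like on disk for constant propagation (see [[constantLatticeDecoder]]), an element is either a
+     *   marker string or the constant itself:
+     *   {{{
+     *   "top"      // decoded as ConstantPropagation.Top
+     *   "bottom"   // decoded as ConstantPropagation.Bottom
+     *   42         // decoded as ConstantPropagation.Constant(42)
+     *   }}}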
+ * + * This is used to specify the givens, if this was not used, this given could be used for every class with a single abstract type. + */ + type Lattice[T] = ConstantPropagation.L[T] | Concrete.L[T] + + /** Returns a map that links a key to a specific decoder. */ + def latticeDecoders[T: Decoder] = Set[(String, Decoder[_ <: Lattice[T]])](("constant", constantLatticeDecoder[T])) + + given latticeDecoder[P[T] <: Lattice[T], T: Decoder]: MapDecoder[P[T]] with + override def read(reader: Reader): P[T] = + reader.start() + val lattice = reader.readMembers[P[T]](latticeDecoders.toMap.asInstanceOf[Map[String, io.bullet.borer.Decoder[? <: P[T]]]]) + reader.close() + return lattice._2 + + given constantLatticeDecoder[T: Decoder]: Decoder[ConstantPropagation.L[T]] with + override def read(reader: Reader): ConstantPropagation.L[T] = + if reader.tryReadString("top") then return ConstantPropagation.Top + else if reader.tryReadString("bottom") then ConstantPropagation.Bottom + else return new ConstantPropagation.Constant[T](reader.read[T]()) + +trait LoadModularSchemeDomainLattices extends Load[SchemeExp] with ModularSchemeDomain: + given stringLatticeDecoder: Decoder[S] + given booleanLatticeDecoder: Decoder[B] + given integerLatticeDecoder: Decoder[I] + given realLatticeDecoder: Decoder[R] + given charLatticeDecoder: Decoder[C] + given symbolLatticeDecoder: Decoder[Sym] + +trait LoadSchemeConstantPropagationDomain extends SchemeConstantPropagationDomain with LoadModularSchemeDomainLattices with LoadLattice[SchemeExp]: + override given stringLatticeDecoder: Decoder[S] = latticeDecoder[L, String].asInstanceOf[Decoder[S]] + override given booleanLatticeDecoder: Decoder[B] = latticeDecoder[L, Boolean].asInstanceOf[Decoder[B]] + override given integerLatticeDecoder: Decoder[I] = latticeDecoder[L, BigInt].asInstanceOf[Decoder[I]] + override given realLatticeDecoder: Decoder[R] = latticeDecoder[L, Double].asInstanceOf[Decoder[R]] + override given charLatticeDecoder: Decoder[C] = latticeDecoder[L, Char].asInstanceOf[Decoder[C]] + override given symbolLatticeDecoder: Decoder[Sym] = latticeDecoder[L, String].asInstanceOf[Decoder[Sym]] + +/** + * Trait to decode [[ModularSchemeLattice modular scheme lattices]]. + * + * Implementation of [[LoadModularDomain]]. 
+ */ +trait LoadModularSchemeDomain + extends LoadModularDomain + with LoadAddr[SchemeExp] + with LoadExpressions[SchemeExp] + with BaseSchemeModFSemanticsM + with LoadEnvironment[SchemeExp] + with LoadLattice[SchemeExp] + with LoadComponents[SchemeExp] + with LoadSchemeConstantPropagationDomain: + type LoadSchemeLattice = ModularSchemeLattice[?, S, B, I, R, C, Sym] + override def hMapDecoders = super.hMapDecoders ++ Set( + ("int", summon[Decoder[(HMapKey, LoadSchemeLattice#Int)]]), + ("boolean", summon[Decoder[(HMapKey, LoadSchemeLattice#Bool)]]), + ("string", summon[Decoder[(HMapKey, LoadSchemeLattice#Str)]]), + ("real", summon[Decoder[(HMapKey, LoadSchemeLattice#Real)]]), + ("char", summon[Decoder[(HMapKey, LoadSchemeLattice#Char)]]), + ("inputPort", summon[Decoder[(HMapKey, LoadSchemeLattice#InputPort)]]), + ("kont", KDecoder), + ("primitive", summon[Decoder[(HMapKey, LoadSchemeLattice#Prim)]]), + ("closure", summon[Decoder[(HMapKey, LoadSchemeLattice#Clo)]]), + ("pointer", summon[Decoder[(HMapKey, LoadSchemeLattice#Pointer)]]), + ("symbol", summon[Decoder[(HMapKey, LoadSchemeLattice#Symbol)]]), + ("cons", summon[Decoder[(HMapKey, LoadSchemeLattice#Cons)]]), + ("vector", summon[Decoder[(HMapKey, LoadSchemeLattice#Vec)]]), + ("nil", nilLatticeDecoder), + ("void", summon[Decoder[(HMapKey, modularLattice.Void.type)]]), + ("ERROR", errorDecoder) + ) + + private given errorDecoder: Decoder[(HMapKey, modularLattice.Nil.type)] with + override def read(reader: Reader): (HMapKey, modularLattice.Nil.type) = + val error = reader.readString() + System.err.nn.println("The lattice was not correctly encoded and had error: `" + error + "`, using `nil` instead.") + return (modularLattice.NilT, modularLattice.Nil) + + given Decoder[(HMapKey, LoadSchemeLattice#Int)] with + override def read(reader: Reader): (HMapKey, LoadSchemeLattice#Int) = + val lattice = reader.read[I]() + return (modularLattice.IntT, new modularLattice.Int(lattice)) + + given Decoder[(HMapKey, LoadSchemeLattice#Real)] with + override def read(reader: Reader): (HMapKey, LoadSchemeLattice#Real) = + val lattice = reader.read[R]() + return (modularLattice.RealT, new modularLattice.Real(lattice)) + + given Decoder[(HMapKey, LoadSchemeLattice#Bool)] with + override def read(reader: Reader): (HMapKey, LoadSchemeLattice#Bool) = + val lattice = reader.read[B]() + return (modularLattice.BoolT, new modularLattice.Bool(lattice)) + + private given KDecoder: Decoder[(HMapKey, LoadSchemeLattice#K)] with + override def read(reader: Reader): (HMapKey, LoadSchemeLattice#K) = + val lattice = reader.read[Set[Component]]().asInstanceOf[Set[LoadSchemeLattice#K]] + return (modularLattice.KontT, new modularLattice.Kont(lattice)) + + given Decoder[(HMapKey, LoadSchemeLattice#Char)] with + override def read(reader: Reader): (HMapKey, LoadSchemeLattice#Char) = + val lattice = reader.read[C]() + return (modularLattice.CharT, new modularLattice.Char(lattice)) + + given Decoder[(HMapKey, LoadSchemeLattice#InputPort)] with + override def read(reader: Reader): (HMapKey, LoadSchemeLattice#InputPort) = + val lattice = reader.read[LoadSchemeLattice#L]() + return (modularLattice.InputPortT, new modularLattice.InputPort(lattice)) + + given Decoder[(HMapKey, LoadSchemeLattice#Str)] with + override def read(reader: Reader): (HMapKey, LoadSchemeLattice#Str) = + val lattice = reader.read[S]() + return (modularLattice.StrT, new modularLattice.Str(lattice)) + + given Decoder[(HMapKey, LoadSchemeLattice#Symbol)] with + override def read(reader: Reader): (HMapKey, 
LoadSchemeLattice#Symbol) = + val lattice = reader.read[Sym]() + return (modularLattice.SymbolT, new modularLattice.Symbol(lattice)) + + given Decoder[(HMapKey, LoadSchemeLattice#Prim)] with + override def read(reader: Reader): (HMapKey, LoadSchemeLattice#Prim) = + return (modularLattice.PrimT, new modularLattice.Prim(reader.read[Set[String]]())) + + given MapDecoder[(SchemeLambdaExp, Env)] with + override def read(reader: Reader): (SchemeLambdaExp, Env) = + reader.start() + val expression = reader.readMember[SchemeExp]("expression").asInstanceOf[SchemeLambdaExp] + val address = reader.readMember[Env]("address") + reader.close() + return (expression, address) + + given ArrayDecoder[(HMapKey, LoadSchemeLattice#Clo)]() with + override def read(reader: Reader): (HMapKey, LoadSchemeLattice#Clo) = + reader.start() + val closures = reader.readUntilBeforeBreak[Set[(SchemeLambdaExp, Env)]](Set[(SchemeLambdaExp, Env)](), + (closures) => closures + (reader.read[(SchemeLambdaExp, Env)]()) + ) + reader.close() + return (modularLattice.CloT, new modularLattice.Clo(closures)) + + given ArrayDecoder[(HMapKey, LoadSchemeLattice#Pointer)]() with + override def read(reader: Reader): (HMapKey, LoadSchemeLattice#Pointer) = + reader.start() + val pointers = reader.readUntilBeforeBreak[Set[Address]](Set(), (pointers) => pointers + (reader.read[Address]())) + reader.close() + return (modularLattice.PointerT, new modularLattice.Pointer(pointers)) + + given MapDecoder[(HMapKey, LoadSchemeLattice#Cons)]() with + override def read(reader: Reader): (HMapKey, LoadSchemeLattice#Cons) = + reader.start() + val car = reader.readMember[LoadSchemeLattice#L]("car") + val cdr = reader.readMember[LoadSchemeLattice#L]("cdr") + reader.close() + return (modularLattice.ConsT, new modularLattice.Cons(car, cdr)) + + given MapDecoder[(HMapKey, LoadSchemeLattice#Vec)] with LoadMapToArray with + override def read(reader: Reader): (HMapKey, LoadSchemeLattice#Vec) = + reader.start() + val size = reader.readMember[I]("size") + val elements = + reader + .readMember[Map[I, LoadSchemeLattice#L]]("elements") + .asInstanceOf[Map[LoadModularSchemeDomain.this.modularLatticeWrapper.I, LoadSchemeLattice#L]] + reader.close() + return (modularLattice.VecT, new modularLattice.Vec(size, elements)) + + given nilLatticeDecoder: Decoder[(HMapKey, modularLattice.Nil.type)] with + override def read(reader: Reader): (HMapKey, modularLattice.Nil.type) = + reader.readString() + return (modularLattice.NilT, modularLattice.Nil) + + given Decoder[(HMapKey, modularLattice.Void.type)] with + override def read(reader: Reader): (HMapKey, modularLattice.Void.type) = + reader.readString() + return (modularLattice.VoidT, modularLattice.Void) + +/** + * Base trait for decoding values as [[ModularSchemeLattice modular scheme lattices]], as defined in [[ModularSchemeDomain]]. + * + * @note + * This trait gives the methods needed to decode values, but does not implement them yet, other traits like [[LoadModularSchemeDomain]] should be + * mixed in for the implementation. The trait that should be mixed in depends on the kind of values that is used in your analysis. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait LoadModularDomain extends LoadValue[SchemeExp] with ModularSchemeDomain: + /** Returns a map that links a key to a specific decoder. 
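+     *
+     *   Mixins extend this set with one entry per kind of lattice element they can decode, for example (a sketch with a hypothetical tag and
+     *   element type `MyElement`):
+     *   {{{
+     *   override def hMapDecoders = super.hMapDecoders ++ Set(("myElement", summon[Decoder[(HMapKey, MyElement)]]))
+     *   }}}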
*/ + def hMapDecoders = Set[(String, Decoder[_ <: (HMapKey, Any)])]() + + given MapDecoder[(HMapKey, Any)] with + override def read(reader: Reader): (HMapKey, Any) = + reader.start() + val hmap = reader.readMembers(hMapDecoders.toMap) + reader.close() + return hmap._2 + + override given valueDecoder: ArrayDecoder[HMap] with + override def read(reader: Reader): HMap = + reader.start() + val hmap = reader.readUntilBeforeBreak[Map[HMapKey, Any]](Map(), (hMap) => hMap + reader.readMember[(HMapKey, Any)]()._2) + reader.close() + return new HMap(hmap) + +/** + * Trait to decode the global store. + * + * This adds the global store to the objects that should be loaded, but does not have an implementation that can be used to decode the + * [[AbstractDomain.Value values]] inside of the store, for this an implementation of [[LoadValue]] like [[LoadModularDomain]] should be included + * depending on the values that are used in your analysis. + */ +trait LoadGlobalStore[Expr <: Expression] extends LoadValue[Expr] with LoadAddr[Expr] with LoadMapToArray with GlobalStore[Expr]: + override def loadInfo = super.loadInfo ++ List(("store", Loadable((store: Map[Addr, Value]) => this.store = store))) diff --git a/code/shared/src/main/scala/maf/persistence/load/Util.scala b/code/shared/src/main/scala/maf/persistence/load/Util.scala new file mode 100644 index 000000000..6f02e80ee --- /dev/null +++ b/code/shared/src/main/scala/maf/persistence/load/Util.scala @@ -0,0 +1,46 @@ +package maf.save + +import io.bullet.borer.Decoder +import io.bullet.borer.Reader +import scala.collection.mutable + +/** + * Trait to decode a map using an array. + * + * This will load a map from an array with alternating keys and values: + * {{{ + * [ + * < key1 >, + * < value1 >, + * < key2 >, + * < value2 >, + * ... + * ] + * }}} + * This can, for example be used if the key is not a string, and can therefore not be used as a key of a JSON map. + */ +trait LoadMapToArray: + /** + * Decodes a map using an array. + * + * This will load a map from an array with alternating keys and values: + * {{{ + * [ + * < key1 >, + * < value1 >, + * < key2 >, + * < value2 >, + * ... + * ] + * }}} + * This can, for example be used if the key is not a string, and can therefore not be used as a key of a JSON map. 
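+     *
+     *   As a sketch, with `Int` keys and `String` values the input array
+     *   {{{
+     *   [1, "one", 2, "two"]
+     *   }}}
+     *   decodes to `Map(1 -> "one", 2 -> "two")`.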
+ */ + given mapKeyDecoder[K, V](using keyDecoder: Decoder[K], valueDecoder: Decoder[V]): ArrayKeyDecoder[Map[K, V]] with + override def read(reader: Reader): Map[K, V] = + reader.start() + val elements = mutable.Set[(K, V)]() + while !reader.hasBreak do + val res = reader.readMember[K, V]()(using keyDecoder, valueDecoder) + elements.add(res._1, res._2) + reader.close() + return elements.toMap diff --git a/code/shared/src/main/scala/maf/persistence/save/Analysis.scala b/code/shared/src/main/scala/maf/persistence/save/Analysis.scala new file mode 100644 index 000000000..bfd6b58d3 --- /dev/null +++ b/code/shared/src/main/scala/maf/persistence/save/Analysis.scala @@ -0,0 +1,124 @@ +package maf.save + +import io.bullet.borer.{Encoder, Writer} +import io.bullet.borer.Json +import maf.util.Writer.write +import maf.core.Expression +import java.nio.file.Paths +import java.nio.file.Files +import maf.language.scheme.SchemeExp +import maf.save.save.SaveSchemeExpressions +import maf.save.save.SaveRecursiveSchemeExpressionsIntID +import maf.modular.AnalysisEntry +import maf.modular.ModAnalysis +import maf.save.save.SaveWorklistExpressionsID +import io.bullet.borer.Cbor +import maf.save.save.SaveActualExpressions +import maf.save.save.SaveExpressionIntID +import maf.save.save.SaveMainSchemeBody + +/** + * Contains info about the top-level objects that need to be saved. + * + * @param value + * The value that needs to be saved + * @param encoder + * Encodes the value + * @tparam T + * The type of the value the needs to be saved + */ +case class Savable[T](val value: T)(using val encoder: Encoder[T]) + +/** + * The base trait for saving an analysis. + * + * Implementing this allows you to save your analysis, by default it will only save the name of your analysis and you should mixin other traits like + * [[SaveComponents]] to also save components. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait Save[Expr <: Expression] extends AnalysisEntry[Expr]: + /** Encode an analysis. */ + given analysisEncoder: MapEncoder[Save[Expr]] with + override def write(writer: Writer, value: Save[Expr]): Writer = + writer.start() + for (key, value) <- saveInfo do writer.writeMember(key, value.value)(using value.encoder) + writer.close() + + protected var saveSet = Set[String]() + protected given excludedAnalysisEncoder: MapEncoder[Save[Expr]] with + override def write(writer: Writer, value: Save[Expr]): Writer = + writer.start() + for (key, value) <- saveInfo do if saveSet.contains(key) then writer.writeMember(key, value.value)(using value.encoder) + writer.close() + + def startSave(): Unit = return + override def save(filename: String): Unit = + startSave() + val res = Json.encode(this)(using analysisEncoder).toByteArray + Files.write(Paths.get(filename), res) + + override def save(filename: String, save: Set[String]): Unit = + startSave() + this.saveSet = save + val res = Json.encode(this)(using excludedAnalysisEncoder).toByteArray + this.saveSet = Set[String]() + Files.write(Paths.get(filename), res) + + /** + * Returns a map strings and [[Savable]] s. + * + * This map defines all top-level objects that should be saved in your analysis, and the key with which they should be saved. If you want to save + * something else, you can override this method and add something to it. 
+ * + * {{{ + * override def saveInfo: Map[String, Savable[_]] = + * super.saveInfo + ("< key >" -> Savable(< saveValue >)) + * }}} + */ + def saveInfo: List[(String, Savable[_])] = List(("name", Savable(analysisName))) + +/** + * Trait to allow you to save analyses using CBOR instead of JSON. + * + * Implementing this allows you to save your analysis, by default it will only save the name of your analysis and you should mixin other traits like + * [[SaveComponents]] to also save components. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait SaveCbor[Expr <: Expression] extends Save[Expr]: + override def save(filename: String): Unit = + startSave() + val res = Cbor.encode(this)(using analysisEncoder).toByteArray + Files.write(Paths.get(filename), res) + + override def save(filename: String, save: Set[String]): Unit = + startSave() + this.saveSet = save + val res = Cbor.encode(this)(using excludedAnalysisEncoder).toByteArray + this.saveSet = Set[String]() + Files.write(Paths.get(filename), res) + +trait SaveInitialized[Expr <: Expression] extends ModAnalysis[Expr] with Save[Expr]: + override def saveInfo: List[(String, Savable[_])] = super.saveInfo ++ List(("initialized", Savable(analysisInitialized))) + +/** The trait used to save the modF analysis. */ +trait SaveModF + extends Save[SchemeExp] + with SaveInitialized[SchemeExp] + with SaveSchemeExpressions + with SaveRecursiveSchemeExpressionsIntID + with SaveComponentIntID[SchemeExp] + with SaveWorklistExpressionsID[SchemeExp] + with SaveMainSchemeBody + with SaveStandardSchemeComponents + with SaveModularSchemeDomain + with SaveSchemeConstantPropagationDomain + with SaveAddrDep[SchemeExp] + with SaveSchemeAddr + with SaveGlobalStore[SchemeExp] + with SaveSequentialWorklist[SchemeExp] + with SaveNoContext[SchemeExp]: + override val maxASTHeight: Int = 0 diff --git a/code/shared/src/main/scala/maf/persistence/save/Component.scala b/code/shared/src/main/scala/maf/persistence/save/Component.scala new file mode 100644 index 000000000..65332891a --- /dev/null +++ b/code/shared/src/main/scala/maf/persistence/save/Component.scala @@ -0,0 +1,268 @@ +package maf.save + +import io.bullet.borer.Encoder +import io.bullet.borer.Encoder.forIterableOnce +import io.bullet.borer.Writer +import maf.core.Address +import maf.core.Expression +import maf.core.Position.Position +import maf.language.scheme.SchemeExp +import maf.modular.AddrDependency +import maf.modular.AnalysisResults +import maf.modular.Dependency +import maf.modular.scheme.modf.SchemeModFComponent +import maf.modular.scheme.modf.StandardSchemeModFComponents +import io.bullet.borer.derivation.MapBasedCodecs +import maf.language.scheme.SchemeLambdaExp +import maf.core.Identifier +import maf.core.Identity +import maf.core.IdentityData +import maf.language.scheme.SchemeFuncall +import maf.language.scheme.SchemeLambda +import io.bullet.borer.derivation.ArrayBasedCodecs +import maf.language.scheme.SchemeVarArgLambda +import maf.language.scheme.SchemeIf +import maf.language.scheme.SchemeLet +import maf.language.scheme.SchemeVar +import maf.core.BasicEnvironment +import maf.core.Environment +import maf.core.WrappedEnv +import maf.core.NestedEnv +import maf.modular.scv.ScvContextSensitivity +import maf.modular.scheme.modf.NoContext +import maf.core.Position +import maf.core.Position.PTag +import scala.collection.mutable.HashMap +import maf.language.scheme.SchemeLetrec +import maf.language.scheme.SchemeAssert +import maf.language.scheme.SchemeValue +import 
maf.language.scheme.SchemeSet +import maf.language.scheme.SchemeBegin +import maf.language.scheme.SchemeLetStar +import maf.save.save.SaveExpressions +import maf.core.worklist.WorkList +import maf.modular.worklist.SequentialWorklistAlgorithm +import maf.modular.worklist.FIFOWorklistAlgorithm +import maf.modular.ModAnalysis +import io.bullet.borer.derivation.CompactMapBasedCodecs + +/** + * Trait to encode positions. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait SavePosition[Expr <: Expression] extends Save[Expr]: + given Encoder[Position] = CompactMapBasedCodecs.deriveAllEncoders[Position] + given Encoder[PTag] = CompactMapBasedCodecs.deriveAllEncoders[PTag] + given Encoder[Identifier] = CompactMapBasedCodecs.deriveEncoder[Identifier] + given Encoder[Identity] = CompactMapBasedCodecs.deriveAllEncoders[Identity] + given Encoder[IdentityData] with + def write(writer: Writer, value: IdentityData): Writer = + System.err.nn.println("IdentityData could not be encoded") + writer + +/** + * The base trait for encoding components. + * + * @note + * This trait gives the methods needed to encode components, but does not implement them yet, other traits like [[SaveStandardSchemeComponents]] or + * [[SaveStandardSchemeComponentID]] should be mixed in for the implementation. The trait that should be mixed in depends on the kind of components + * are used in your analysis. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait SaveComponents[Expr <: Expression] extends ModAnalysis[Expr] with Save[Expr]: + /** Encodes a component. */ + given componentEncoder: Encoder[Component] + +protected trait SaveActualComps[Expr <: Expression] extends SaveComponents[Expr]: + given actualComponentEncoder: Encoder[Component] + +trait SaveActualComponents[Expr <: Expression] extends SaveActualComps[Expr]: + override given componentEncoder: Encoder[Component] = actualComponentEncoder + +/** + * Base trait for encoding components only by their ID, and not in their entirety. + * + * @note + * This trait gives the methods needed to encode components using their ID, instead of saving it entirely, but no implementation. Other traits like + * [[SaveStandardSchemeComponentID]] should be mixed in to give the implementation based on what ID you want to use and what components your + * analysis uses. + * + * @note + * Because this trait only encodes the component IDs, the entire component should be encoded somewhere else if you want to decode this again. + * + * @tparam T + * The type of the value the needs to be saved + */ +trait SaveComponentID[Expr <: Expression] extends SaveActualComps[Expr] with SavePosition[Expr]: + /** Encodes a component by their ID */ + given componentIDEncoder: Encoder[Component] + override given componentEncoder: Encoder[Component] = componentIDEncoder + + /** Encodes a set of components */ + protected given componentSetEncoder: Encoder[Set[Component]] + override def saveInfo: List[(String, Savable[_])] = super.saveInfo ++ List(("components", Savable(visited))) + +/** + * Trait that encodes components using an autoincreasing integer ID. 
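+ *
+ *   As a sketch, the `"components"` entry is written as a map from the generated ID to the full component, while every other occurrence of a
+ *   component is written as its ID only (JSON):
+ *   {{{
+ *   "components": { "0": < component >, "1": < component >, ... }
+ *   }}}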
+ * + * Implementation of [[SaveComponentID]] + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait SaveComponentIntID[Expr <: Expression] extends SaveActualComps[Expr] with SaveComponentID[Expr]: + private var components = HashMap[Component, Int]() + private var id = 0 + + override protected given componentSetEncoder: MapEncoder[Set[Component]] with + override def write(writer: Writer, components: Set[Component]): Writer = + writer.start() + for (component <- components) do + SaveComponentIntID.this.components.addOne((component, id)) + writer.writeMember(id.toString(), component)(using actualComponentEncoder) + id += 1 + writer.close() + + override given componentIDEncoder: Encoder[Component] with + override def write(writer: Writer, component: Component): Writer = writer.write(components(component)) + + override def startSave(): Unit = + id = 0 + components = HashMap[Component, Int]() + super.startSave() + +/** + * Trait that encodes components using their position. + * + * Implementation of [[SaveComponentID]] + * + * @note + * Because this trait only encodes the component position, the entire component should be encoded somewhere else if you want to decode this again. + */ +trait SaveStandardSchemeComponentPosition extends SaveComponentID[SchemeExp] with StandardSchemeModFComponents: + override type Component = SchemeModFComponent + + /** Encodes a component by their position */ + override given componentIDEncoder: Encoder[Component] with + def write(writer: Writer, component: Component): Writer = + if component.equals(initialComponent) then writer.write("main") + else writer.write(component.asInstanceOf[SchemeModFComponent.Call[ComponentContext]])(schemeComponentIDEncoder) + + /** Encodes a scheme component using their position */ + given schemeComponentIDEncoder[T]: Encoder[SchemeModFComponent.Call[T]] with + def write(writer: Writer, component: SchemeModFComponent.Call[T]): Writer = + val (lambda, _) = component.clo + writer.write(lambda.idn.pos) + + override protected given componentSetEncoder: ArrayEncoder[Set[Component]] with + override def write(writer: Writer, components: Set[Component]): Writer = + writer.start() + for (component <- components) do writer.writeMember(component)(using actualComponentEncoder) + writer.close() + +/** + * Trait to encode environments. + * + * @tparam T + * The type of the value the needs to be saved + */ +trait SaveEnvironment[Expr <: Expression] extends Save[Expr] with SaveAddr[Expr]: + given Encoder[BasicEnvironment[Address]] with + override def write(writer: Writer, env: BasicEnvironment[Address]): Writer = writer.write(env.content) + + given MapEncoder[NestedEnv[Address, Address]] with + override def write(writer: Writer, env: NestedEnv[Address, Address]): Writer = + writer.start() + writer.writeMember("content", env.content) + writer.writeMember("rst", env.rst) + writer.close() + + given MapEncoder[Environment[Address]] with + override def write(writer: Writer, env: Environment[Address]): Writer = + writer.start() + env match { + case basicEnv @ BasicEnvironment(_) => + writer.writeMember("basicEnvironment", basicEnv) + case nestedEnv @ NestedEnv(_, _) => + writer.writeMember("nestedEnvironment", nestedEnv.asInstanceOf[NestedEnv[Address, Address]]) + case _ => + System.err.nn.println("The environemnt with type `" + env.getClass + "` could not be encoded") + writer + } + writer.close() + +/** + * Base trait for saving context. 
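+ *
+ *   A sketch of an implementation for a hypothetical context type `MyContext` that is simply written as a string:
+ *   {{{
+ *   trait SaveMyContext[Expr <: Expression] extends SaveContext[Expr]:
+ *       override type EncodeContext = MyContext
+ *       override given contextEncoder: Encoder[EncodeContext] with
+ *           override def write(writer: Writer, context: EncodeContext): Writer = writer.write(context.toString)
+ *   }}}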
+ * + * @note + * This trait gives the methods needed to encode context, but not the implementation. Other traits like [[SaveNoContext]] should be mixed in. The + * exact trait that is mixed in depends on the Context that you are using in your analysis. + * + * @tparam T + * The type of the value the needs to be saved + */ +trait SaveContext[Expr <: Expression] extends Save[Expr]: + /** The type of context that should be encoded. */ + type EncodeContext + + /** Encodes context */ + given contextEncoder: Encoder[EncodeContext] + +/** + * Trait to encode the context for an analysis with no context. + * + * This will just write 'None' when asked to write the context. + * + * @tparam T + * The type of the value the needs to be saved + */ +trait SaveNoContext[Expr <: Expression] extends SaveContext[Expr]: + override type EncodeContext = NoContext.type + override given contextEncoder: Encoder[EncodeContext] with + override def write(writer: Writer, context: EncodeContext): Writer = writer.write("None") + +/** + * Trait to encode standard scheme components. + * + * This is an implementation of [[SaveComponents]]. + */ +trait SaveStandardSchemeComponents + extends SaveActualComps[SchemeExp] + with StandardSchemeModFComponents + with SaveEnvironment[SchemeExp] + with SaveContext[SchemeExp] + with SaveExpressions[SchemeExp]: + override type Component = SchemeModFComponent + + override given actualComponentEncoder: MapEncoder[Component] with + def write(writer: Writer, component: Component): Writer = + if component.equals(initialComponent) then writer.write("main") + else writer.write(component.asInstanceOf[SchemeModFComponent.Call[ComponentContext]]) + + given [T]: MapEncoder[SchemeModFComponent.Call[T]] with + override def write(writer: Writer, component: SchemeModFComponent.Call[T]): Writer = + writer.start() + val (lambda, env) = component.clo + val context = component.ctx + writer.writeMember("lambda", lambda.asInstanceOf[SchemeExp]) + writer.writeMember("environment", env) + writer.writeMember("context", context.asInstanceOf[EncodeContext]) + writer.close() + +trait SaveWorklist[Expr <: Expression] extends Save[Expr] with SaveComponents[Expr]: + given worklistEncoder: Encoder[WorkList[Component]] + def getWorklist: WorkList[Component] + override def saveInfo: List[(String, Savable[_])] = super.saveInfo ++ List(("worklist", Savable(getWorklist))) + +trait SaveSequentialWorklist[Expr <: Expression] extends SaveWorklist[Expr] with SequentialWorklistAlgorithm[Expr]: + override def getWorklist: WorkList[Component] = workList + override given worklistEncoder: ArrayEncoder[WorkList[Component]] with + override def write(writer: Writer, worklist: WorkList[Component]): Writer = + writer.start() + val worklistList = worklist.toList + worklistList.foreach(writer.writeMember(_)) + writer.close() diff --git a/code/shared/src/main/scala/maf/persistence/save/Dependency.scala b/code/shared/src/main/scala/maf/persistence/save/Dependency.scala new file mode 100644 index 000000000..54fe7d7d8 --- /dev/null +++ b/code/shared/src/main/scala/maf/persistence/save/Dependency.scala @@ -0,0 +1,141 @@ +package maf.save + +import maf.language.scheme.SchemeExp +import maf.modular.Dependency +import io.bullet.borer.Writer +import maf.modular.AddrDependency +import maf.core.Expression +import maf.core.Address +import maf.modular.scheme.VarAddr +import maf.modular.ReturnAddr +import io.bullet.borer.Encoder +import maf.modular.scheme.PrmAddr +import maf.modular.scheme.PtrAddr +import maf.language.scheme.SchemeValue +import 
maf.core.Identifier +import maf.util.Writer.write +import maf.save.save.SaveExpressions +import maf.modular.scheme.SchemeAddr + +/** + * Trait to encode address dependencies. + * + * This is an implementation of [[SaveDependency]]. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait SaveAddrDep[Expr <: Expression] extends SaveDependency[Expr] with SavePosition[Expr] with SaveAddr[Expr]: + override protected def encodeDependency(writer: Writer, dependency: Dependency): Writer = + dependency match { + case AddrDependency(addr) => writer.writeMember("addrDependency", addr) + case _ => super.encodeDependency(writer, dependency) + } + +/** + * Base trait for encoding dependencies. + * + * @note + * This trait gives the methods needed to encode dependencies, but not the implementation. Other traits like [[SaveAddrDep]] should be mixed in. The + * exact trait that is mixed in depends on the dependencies that you are using in your analysis. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait SaveDependency[Expr <: Expression] extends SaveMapToArray with SaveComponents[Expr]: + override def saveInfo: List[(String, Savable[_])] = + super.saveInfo ++ List(("dependencies" -> Savable(deps))) + + /** + * Encodes a dependency. + * + * This method allows for expanding the dependencies that can be encoded by overriding it, and allowing you to add new dependencies by simply + * mixin in another trait that overrides this method. If you want to add a new encodable dependency you can override this method like this: + * {{{ + * override def encodeDependency(writer: Writer, dependency: Dependency)(using AbstractEncoder): Writer = + * dependency match { + * case < Dependency >(...) => < encode dependency > + * case _ => super.encodeDependency(writer, dependency) + * } + * }}} + * This is just an example and the actual implementation can also be done differently. + * + * @note + * This method should not be called directly, but should instead only be called from an encoder. + * + * @param writer + * The writer to write to + * @param dependency + * The dependency to encode + * @param encoder + * Implicit argument that encodes the dependency + * @return + * The used writer + */ + protected def encodeDependency(writer: Writer, dependency: Dependency): Writer = + System.err.nn.println("The dependency with type `" + dependency.getClass + "` could not be encoded") + writer + + given MapEncoder[Dependency] with + override def write(writer: Writer, value: Dependency): Writer = + writer.start() + encodeDependency(writer, value) + writer.close() + +/** + * Base trait for encoding addresses. + * + * @note + * This trait gives the methods needed to encode addresses, but not the implementation. Other traits like [[SaveSchemeAddr]] should be mixed in. The + * exact trait that is mixed in depends on the addresses that you are using in your analysis. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait SaveAddr[Expr <: Expression] extends Save[Expr] with SavePosition[Expr]: + given addressEncoder: Encoder[Address] + +/** + * Trait to encode scheme addresses. + * + * This is an implementation of [[SaveAddr]]. 
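+ *
+ *   As a sketch, every address is written as a single-entry map whose key selects the address kind (JSON; the payloads are illustrative):
+ *   {{{
+ *   { "prmAddr": "car" }
+ *   { "varAddr": { "id": < identifier >, "context": < context > } }
+ *   }}}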
+ */ +trait SaveSchemeAddr extends SaveAddr[SchemeExp] with SaveComponents[SchemeExp] with SaveContext[SchemeExp] with SaveExpressions[SchemeExp]: + given MapEncoder[VarAddr[EncodeContext]] with + override def write(writer: Writer, address: VarAddr[EncodeContext]): Writer = + writer.start() + writer.writeMember("id", address.id) + writer.writeMember("context", address.ctx.asInstanceOf[Option[EncodeContext]]) + writer.close() + + given MapEncoder[ReturnAddr[Component]] with + override def write(writer: Writer, address: ReturnAddr[Component]): Writer = + writer.start() + writer.writeMember("identity", address.idn) + writer.writeMember("component", address.cmp) + writer.close() + + given MapEncoder[PtrAddr[EncodeContext]] with + override def write(writer: Writer, address: PtrAddr[EncodeContext]): Writer = + writer.start() + writer.writeMember("expression", address.exp.asInstanceOf[SchemeExp]) + writer.writeMember("context", address.ctx.asInstanceOf[Option[EncodeContext]]) + writer.close() + + override given addressEncoder: MapEncoder[Address] with + override def write(writer: Writer, address: Address) = + writer.start() + address match { + case varAddr @ VarAddr(_, _) => + writer.writeMember("varAddr", varAddr.asInstanceOf[VarAddr[EncodeContext]]) + case returnAddr @ ReturnAddr(_, _) => + writer.writeMember("returnAddr", returnAddr.asInstanceOf[ReturnAddr[Component]]) + case PrmAddr(nam) => + writer.writeMember("prmAddr", nam) + case ptrAddr @ PtrAddr(_, _) => + writer.writeMember("ptrAddr", ptrAddr.asInstanceOf[PtrAddr[EncodeContext]]) + case _ => + System.err.nn.println("The scheme address with type `" + address.getClass + "` could not be encoded") + writer + } + writer.close() diff --git a/code/shared/src/main/scala/maf/persistence/save/Encoder.scala b/code/shared/src/main/scala/maf/persistence/save/Encoder.scala new file mode 100644 index 000000000..fba92cfc4 --- /dev/null +++ b/code/shared/src/main/scala/maf/persistence/save/Encoder.scala @@ -0,0 +1,378 @@ +package maf.save + +import io.bullet.borer.Encoder +import io.bullet.borer.Writer +import io.bullet.borer.derivation.MapBasedCodecs +import io.bullet.borer.derivation.ArrayBasedCodecs +import scala.collection.mutable.HashMap +import io.bullet.borer.derivation.CompactMapBasedCodecs +import scala.concurrent.Future +import scala.concurrent.Promise +import scala.util.{Failure, Success} +import scala.concurrent.ExecutionContext.Implicits.global + +/** + * Base trait for an encoder. + * + * This trait has methods for writing keys and values with a encoder to a given writer and depending on the implementation off this trait, this can + * either write your values to a map or an array. This allows you to encode your values in a certain way, but without knowing exactly how it will be + * stored (e.g. in a map or in an array). + * + * This trait also has method for opening and closing an encapsulation, this will open/close either a map or an array based on the implementation of + * this trait. + * + * @note + * This trait gives the methods needed to write values, but not the implementation. Other traits like [[MapEncoder]] or [[ArrayEncoder]] should be + * used depending on how you want your value to be encoded. + * + * @note + * Some encoders like [[ArrayEncoder]] will also not encode your key, because arrays do not require keys like maps do, if the key does need to be + * stored, you should use an encoder that writes them down like [[MapEncoder]] or [[ArrayKeyEncoder]] if you want to use an array. 
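+ *
+ *   A typical encoder only uses these encoder-agnostic helpers, so the same code can be stored as a map or as an array. A minimal sketch for a
+ *   hypothetical class `Entry(name: String, count: Int)`:
+ *   {{{
+ *   given MapEncoder[Entry] with
+ *       override def write(writer: Writer, entry: Entry): Writer =
+ *           writer.start()
+ *           writer.writeMember("name", entry.name)
+ *           writer.writeMember("count", entry.count)
+ *           writer.close()
+ *   }}}
+ *   Replacing `MapEncoder` with `ArrayEncoder` keeps this code unchanged; only the keys are dropped from the output.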
+ */ +trait AbstractEncoder[T] extends Encoder[T]: + /** + * Write a generated key to the given writer. + * + * @note + * This only writes a key and should therefore be followed up by either writing a value using [[writeValue]] or opening an map or array using + * [[openEncapsulation]]. + * + * @note + * In some encoders like [[ArrayEncoder]] this will not write anything as this encoder does not use keys. + * + * @note + * The key that is generated depends on the encoder that is used. + * + * @param writer + * The writer used write the key + * @return + * The used writer + */ + def writeKey(writer: Writer): Writer + + /** + * Write the given key to the given writer. + * + * @note + * This only writes a key and should therefore be followed up by either writing a value using [[writeValue]] or opening an map or array using + * [[openEncapsulation]]. + * + * @note + * In some encoders like [[ArrayEncoder]] this will not write anything as this encoder does not use keys. + * + * @param writer + * The writer used write the key + * @param key + * The key to write + * @return + * The used writer + */ + def writeKey(writer: Writer, key: String): Writer + + /** + * Write a given value to the given writer. + * + * @note + * This only writes a value and should therefore only be called after writing a key using [[writeKey]]. + * + * @param writer + * The writer used write the value + * @param value + * The value to write + * @return + * The used writer + */ + def writeValue[T: Encoder](writer: Writer, value: T): Writer + + /** + * Opens either a new map or a new array, based on the encoder that is used. + * + * @note + * This only opens a new encapsulation and should therefore only be called after writing a key using [[writeKey]]. + * + * @param writer + * The writer used write the array/map + * @return + * The used writer + */ + def openEncapsulation(writer: Writer): Writer + + /** + * Opens either a new map or a new array, based on the encoder that is used. + * + * @note + * This only opens a new encapsulation and should therefore only be called after writing a key using [[writeKey]]. + * + * @param writer + * The writer used write the array/map + * @param amount + * The amount of elements the map/array, this is only useful when using CBOR the length of maps and arrays is used in the encoding + * @return + * The used writer + */ + def openEncapsulation(writer: Writer, amount: Int): Writer + + /** + * Closes the map/array, based on the encoder that is used. + * + * @note + * This only closes the encapsulation and should therefore only be called after having already opened an encapsulation using + * [[openEncapsulation]]. + * + * @param writer + * The writer used write the array/map + * @return + * The used writer + */ + def closeEncapsulation(writer: Writer): Writer = writer.writeBreak() + + /** [TODO: description] */ + def encodeOption[T: Encoder](writer: Writer, key: String, option: Option[T]): Writer + + extension (writer: Writer) + /** + * Close the encapsulation. + * + * This will close either a map or an array based on the encoder that is given. + * + * @param encoder + * Implicit argument that decides how to close the encapsulation + */ + def close(): Writer = closeEncapsulation(writer) + + /** + * Write a key-value pair. + * + * This will write a key-value pair in either a map or an array, based on the encoder that is given. 
+         *
+         * @param key
+         *   The key to write
+         * @param value
+         *   The value to write
+         * @param encoder
+         *   Implicit argument that decides how to write the key-value pair
+         * @tparam T
+         *   The type of the value that should be written, this type should have an encoder
+         */
+        def writeMember[T: Encoder](key: String, value: T): Writer =
+            writeKey(writer, key)
+            writeValue(writer, value)
+
+        /**
+         * Write a value.
+         *
+         * This will write a value in either a map or an array, based on the encoder that is given. Certain encoders like map-based encoders will
+         * create a key and write this first if this method is called.
+         *
+         * @param value
+         *   The value to write
+         * @param encoder
+         *   Implicit argument that decides how to write the value
+         * @tparam T
+         *   The type of the value that should be written, this type should have an encoder
+         */
+        def writeMember[T: Encoder](value: T): Writer =
+            writeKey(writer)
+            writeValue(writer, value)
+
+        /** Write the value under the given key if it is present; depending on the encoder, an absent value is either skipped or written as an empty array. */
+        def writeMember[T: Encoder](key: String, value: Option[T]): Writer =
+            encodeOption(writer, key, value)
+
+        /** Open the top-level map/array, based on the encoder that is used, without writing a key first. */
+        def start() =
+            openEncapsulation(writer)
+
+        /**
+         * Open the encapsulation.
+         *
+         * This will open either a map or an array based on the encoder that is given. Certain encoders like map-based encoders will create a key and
+         * write this first if this method is called.
+         *
+         * @param encoder
+         *   Implicit argument that decides how to open the encapsulation
+         */
+        def open(): Writer =
+            writeKey(writer)
+            openEncapsulation(writer)
+
+        /**
+         * Open the encapsulation with a key.
+         *
+         * This will open either a map or an array, based on the encoder that is given, under the given key.
+         *
+         * @param key
+         *   The key to write
+         * @param encoder
+         *   Implicit argument that decides how to open the encapsulation
+         */
+        def open(key: String): Writer =
+            writeKey(writer, key)
+            openEncapsulation(writer)
+
+        /**
+         * Open the encapsulation.
+         *
+         * This will open either a map or an array based on the encoder that is given. Certain encoders like map-based encoders will create a key and
+         * write this first if this method is called.
+         *
+         * @param amount
+         *   The number of elements in the map/array; this is only relevant when using CBOR, where the length of maps and arrays is part of the
+         *   encoding
+         * @param encoder
+         *   Implicit argument that decides how to open the encapsulation
+         */
+        def open(amount: Int): Writer =
+            writeKey(writer)
+            openEncapsulation(writer, amount)
+
+        /**
+         * Open the encapsulation with a key.
+         *
+         * This will open either a map or an array, based on the encoder that is given, under the given key.
+         *
+         * @param key
+         *   The key to write
+         * @param amount
+         *   The number of elements in the map/array; this is only relevant when using CBOR, where the length of maps and arrays is part of the
+         *   encoding
+         * @param encoder
+         *   Implicit argument that decides how to open the encapsulation
+         */
+        def open(key: String, amount: Int): Writer =
+            writeKey(writer, key)
+            openEncapsulation(writer, amount)
+
+/**
+ * Encoder that uses maps to encode values.
+ *
+ * This encoder uses maps to encode values and will therefore always use keys; if no key is provided, an auto-increasing ID will be used instead.
+ * + * @example + * {{{ + * given MapEncoder[T] + * override protected def write(writer: Writer, value: T): Writer = + * writer.start() + * < encode value > + * writer.close() + * }}} + * + * @tparam T + * The type to encode + */ +trait MapEncoder[T] extends AbstractEncoder[T]: + /** Used to generate IDs if no key is provided */ + private var id = -1 + override def writeKey(writer: Writer): Writer = + id += 1 + writeKey(writer, id.toString()) + override def writeKey(writer: Writer, key: String): Writer = writer.write(key) + override def writeValue[T: Encoder](writer: Writer, value: T): Writer = writer.write(value) + override def openEncapsulation(writer: Writer): Writer = writer.writeMapStart() + override def openEncapsulation(writer: Writer, amount: Int): Writer = writer.writeMapOpen(amount) + override def encodeOption[T: Encoder](writer: Writer, key: String, option: Option[T]): Writer = + if option.isDefined then writer.writeMember(key, option.get) + writer + +/** + * Encoder that uses arrays to encode values. + * + * This encoder uses arrays to encode values and will not write any keys, if you want an array-based encoder that saves keys, you should use + * [[ArrayKeyEncoder]]. + * + * @example + * {{{ + * given ArrayEncoder[T] + * override protected def write(writer: Writer, value: T): Writer = + * writer.start() + * < encode value > + * writer.close() + * }}} + * + * @tparam T + * The type to encode + */ +trait ArrayEncoder[T] extends AbstractEncoder[T]: + override def writeKey(writer: Writer): Writer = writer + override def writeKey(writer: Writer, key: String): Writer = writer + override def writeValue[T: Encoder](writer: Writer, value: T): Writer = writer.write(value) + override def openEncapsulation(writer: Writer): Writer = writer.writeArrayStart() + override def openEncapsulation(writer: Writer, amount: Int): Writer = writer.writeArrayOpen(amount) + override def encodeOption[T: Encoder](writer: Writer, key: String, option: Option[T]): Writer = + writer.writeArrayStart() + if option.isDefined then writer.write(option.get) + writer.writeBreak() + +/** + * Encoder that uses arrays to encode values, but preserves keys. + * + * This encoder uses arrays to encode values but will save key-value pairs by first writing the key and then the value. This can be used, for example, + * if your keys are not strings. Since non-string keys are not supported in JSON, you cannot use a map for this, but this class does allow you to save + * your key-value pair in an intuitive way. + * + * This is how your key-value pair would be saved: + * {{{ + * [ + * < key1 >, + * < value1 >, + * < key2 >, + * < value2> , + * ... + * ] + * }}} + * + * @example + * {{{ + * given ArrayKeyEncoder[T] + * override protected def write(writer: Writer, value: T): Writer = + * writer.start() + * < encode value > + * writer.close() + * }}} + * + * @tparam T + * The type to encode + */ +trait ArrayKeyEncoder[T] extends ArrayEncoder[T]: + override def writeKey(writer: Writer, key: String): Writer = writer.write(key) + + /** + * Write the given key to the given writer. + * + * @note + * This only writes a key and should therefore be followed up by either writing a value using [[writeValue]] or opening an map or array using + * [[openEncapsulation]]. + * + * @note + * In some encoders like [[ArrayEncoder]] this will not write anything as this encoder does not use keys. 
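+ *
+ * @note
+ *   Unlike the `String` overload, this variant accepts any key type for which an [[Encoder]] is available. Combined with the flattened
+ *   `[key, value, key, value, ...]` layout described above, this is what allows non-string keys (for example addresses) to be written, e.g.
+ *   (illustrative) `writer.writeMember(someAddress, someValue)`.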
+ * + * @param writer + * The writer used write the key + * @param key + * The key to write + * @return + * The used writer + */ + def writeKey[T: Encoder](writer: Writer, key: T): Writer = writer.write(key) + override def writeValue[T: Encoder](writer: Writer, value: T): Writer = writer.write(value) + override def encodeOption[T: Encoder](writer: Writer, key: String, option: Option[T]): Writer = + if option.isDefined then writer.writeMember(key, option.get) + writer + + extension (writer: Writer) + /** + * Write a key-value pair. + * + * This will write a key-value pair in either a map or an array, based on the encoder that is given. + * + * @param key + * The key to write + * @param value + * The value to write + * @param encoder + * Implicit argument that decides how to write the key-value pair + * @tparam T + * The type of the key that should be written, this type should have an encoder + * @tparam U + * The type of the value that should be written, this type should have an encoder + */ + def writeMember[T: Encoder, U: Encoder](key: T, value: U): Writer = + writeKey(writer, key) + writeValue(writer, value) diff --git a/code/shared/src/main/scala/maf/persistence/save/Expression.scala b/code/shared/src/main/scala/maf/persistence/save/Expression.scala new file mode 100644 index 000000000..00f0d7d91 --- /dev/null +++ b/code/shared/src/main/scala/maf/persistence/save/Expression.scala @@ -0,0 +1,257 @@ +package maf.save.save + +import maf.core.Expression +import maf.save.Save +import maf.save.AbstractEncoder +import io.bullet.borer.Encoder +import maf.save.Savable +import scala.collection.mutable.HashMap +import io.bullet.borer.Writer +import maf.language.scheme.SchemeExp +import maf.language.scheme.SchemeFuncall +import maf.language.scheme.SchemeLambda +import maf.language.scheme.SchemeVarArgLambda +import maf.language.scheme.SchemeLetrec +import maf.language.scheme.SchemeAssert +import maf.language.scheme.SchemeLet +import maf.language.scheme.SchemeIf +import maf.language.scheme.SchemeSet +import maf.language.scheme.SchemeBegin +import maf.language.scheme.SchemeLetStar +import maf.save.SavePosition +import maf.save.ArrayEncoder +import maf.language.scheme.SchemeVar +import maf.language.scheme.SchemeValue +import maf.language.scheme.SchemeLambdaExp +import maf.modular.ModAnalysis +import maf.save.MapEncoder +import io.bullet.borer.derivation.CompactMapBasedCodecs +import maf.save.SaveWorklist +import maf.modular.scheme.modf.BaseSchemeModFSemanticsM + +/** + * The base trait for encoding expressions. + * + * @note + * This trait gives the methods needed to encode expressions, but does not implement them yet, other traits like [[SaveSchemeExpressions]] or + * [[SaveExpressionIntID]] should be mixed in for the implementation. The trait that should be mixed in depends on the kind of components are used + * in your analysis. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait SaveExpressions[Expr <: Expression] extends Save[Expr]: + /** Encodes an expression */ + given expressionEncoder: Encoder[Expr] + +/** + * The base trait for encoding expressions. + * + * This trait is used to add [[actualExpressionEncoder]], this given cannot be added into [[SaveExpressions]] because this would cause an ambigious + * implicit with [[expressionEncoder]]. + * + * @note + * This trait gives the methods needed to encode expressions, but does not implement them yet, other traits like [[SaveSchemeExpressions]] or + * [[SaveExpressionIntID]] should be mixed in for the implementation. 
The trait that should be mixed in depends on the kind of components are used + * in your analysis. + * @note + * This trait should not be used, rather, [[SaveExpressions]] or [[SaveActualExpressions]] should be extended. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait SaveActualExprs[Expr <: Expression] extends SaveExpressions[Expr]: + /** Encodes the actual expression, and doesn't encode it using IDs. */ + protected given actualExpressionEncoder: Encoder[Expr] + +/** + * The base trait for encoding expressions as IDs. + * + * This is an implementation of [[SaveExpressions]] + * + * @note + * This trait gives the methods needed to encode expression IDs, but does not implement them yet, other traits like + * [[SaveRecursiveSchemeExpressionsIntID]] or [[SaveExpressionIntID]] should be mixed in for the implementation. The trait that should be mixed in + * depends on the kind of components are used in your analysis. + * @note + * This trait will first save all necessary expressions separately and use IDs to save them to following times. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait SaveExpressionID[Expr <: Expression] extends ModAnalysis[Expr] with Save[Expr] with SaveActualExprs[Expr]: + override given expressionEncoder: Encoder[Expr] = expressionIDEncoder + override def saveInfo: List[(String, Savable[_])] = super.saveInfo ++ List(("expressions" -> Savable(getExpressions()))) + def getExpressions(): Set[Expr] = return visited.map(expr(_)) + + /** Encodes a set of expressions, this is used to e.g. add ID info to the expressions. */ + protected given expressionSetEncoder: Encoder[Set[Expr]] + + /** Encodes an expression using an ID */ + protected given expressionIDEncoder: Encoder[Expr] + +trait SaveWorklistExpressionsID[Expr <: Expression] extends SaveExpressionID[Expr] with SaveWorklist[Expr]: + override def getExpressions(): Set[Expr] = return super.getExpressions() ++ getWorklist.toSet.map(expr(_)) + +trait SaveMainSchemeBody extends BaseSchemeModFSemanticsM with SaveExpressions[SchemeExp]: + override def saveInfo: List[(String, Savable[?])] = super.saveInfo ++ List(("mainBody" -> Savable(mainBody))) + +/** + * Trait to encode expressions using integer IDs. + * + * Implementation of [[SaveExpressionID]] + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait SaveExpressionIntID[Expr <: Expression] extends SaveExpressionID[Expr] with SaveExpressions[Expr]: + private var expressions = HashMap[Expr, Int]() + private var id = 0 + + override protected given expressionSetEncoder: MapEncoder[Set[Expr]] with + override def write(writer: Writer, exprs: Set[Expr]): Writer = + writer.start() + for (expr <- exprs) do + writer.writeMember(id.toString, expr)(using actualExpressionEncoder) + expressions.addOne((expr, id)) + id += 1 + writer.close() + + override protected given expressionIDEncoder: Encoder[Expr] with + override def write(writer: Writer, expr: Expr): Writer = + if expressions.contains(expr) then writer.write(expressions(expr)) + else writer.write(expr)(using actualExpressionEncoder) + + override def startSave(): Unit = + id = 0 + expressions = HashMap[Expr, Int]() + super.startSave() + +/** + * Trait to encode scheme expressions recursively using integer IDs. + * + * This will recursively save every expression, this means that if you are e.g. encoding an `if` statement, this will first encode the condition, the + * consequence and the alternative, and only then save the actual `if` statement. 
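+ *
+ * As a rough sketch of the resulting layout (IDs, ordering and the exact nesting are illustrative, and assume every node exceeds
+ * [[maxASTHeight]]), the sub-expressions of an `if` end up in the expression table before the `if` itself, which can then refer to them by ID:
+ * {{{
+ * "expressions": {
+ *   "0": < condition >,
+ *   "1": < consequent >,
+ *   "2": < alternative >,
+ *   "3": < the if expression, referencing 0, 1 and 2 >
+ * }
+ * }}}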
+ * + * Implementation of [[SaveExpressionID]] + */ +trait SaveRecursiveSchemeExpressionsIntID extends SaveExpressionID[SchemeExp] with SaveExpressions[SchemeExp]: + private var expressions = HashMap[SchemeExp, Int]() + private var id = 0 + + /** The max height of the AST before you encode it normally. */ + val maxASTHeight: Int + + override protected given expressionSetEncoder: MapEncoder[Set[SchemeExp]] with + given Encoder[List[SchemeExp]] with + override def write(writer: Writer, exprs: List[SchemeExp]): Writer = + for (expr <- exprs) writer.write(expr)(using recursiveExpressionEncoder) + writer + + given recursiveExpressionEncoder: Encoder[SchemeExp] with + override def write(writer: Writer, expr: SchemeExp): Writer = + if expressions.contains(expr) then return writer + if expr.height > maxASTHeight then + expr match + case funcall: SchemeFuncall => + writer.write(funcall.args) + writer.write(funcall.f)(using recursiveExpressionEncoder) + case lambda: SchemeLambda => writer.write(lambda.body) + case argLambda: SchemeVarArgLambda => writer.write(argLambda.body) + case letrec: SchemeLetrec => + for (binding <- letrec.bindings) writer.write(binding._2)(using recursiveExpressionEncoder) + writer.write(letrec.body) + case assert: SchemeAssert => writer.write(assert.exp)(using recursiveExpressionEncoder) + case let: SchemeLet => + for (binding <- let.bindings) writer.write(binding._2)(using recursiveExpressionEncoder) + writer.write(let.body) + case schemeIf: SchemeIf => + writer.write(schemeIf.cond)(using recursiveExpressionEncoder) + writer.write(schemeIf.cons)(using recursiveExpressionEncoder) + writer.write(schemeIf.alt)(using recursiveExpressionEncoder) + case set: SchemeSet => + writer.write(set.value)(using recursiveExpressionEncoder) + case begin: SchemeBegin => + writer.write(begin.exps) + case letStar: SchemeLetStar => + for (binding <- letStar.bindings) writer.write(binding._2)(using recursiveExpressionEncoder) + writer.write(letStar.body) + case _: SchemeVar => () + case _: SchemeValue => () + case _ => System.err.nn.println("The expression with type `" + expr.getClass + "` could not be encoded") + + writer.writeMember(id.toString(), expr)(using actualExpressionEncoder) + expressions.addOne(expr, id) + id += 1 + writer + + override def write(writer: Writer, exprs: Set[SchemeExp]): Writer = + writer.start() + for (expr <- exprs) do writer.write(expr)(using recursiveExpressionEncoder) + writer.close() + + override protected given expressionIDEncoder: Encoder[SchemeExp] with + override def write(writer: Writer, expr: SchemeExp): Writer = + if expressions.contains(expr) then writer.write(expressions(expr)) + else writer.write(expr)(using actualExpressionEncoder) + + override def startSave(): Unit = + id = 0 + expressions = HashMap[SchemeExp, Int]() + super.startSave() + +/** + * Save the expressions normally. + * + * Implementation of [[SaveExpressions]]. + */ +trait SaveActualExpressions[Expr <: Expression] extends SaveActualExprs[Expr]: + override given expressionEncoder: Encoder[Expr] = actualExpressionEncoder + +/** + * Save Scheme expressions. + * + * Implementation of [[SaveExpressions]]. 
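+ *
+ * Every expression is written as a single-entry map whose key names the kind of node, so an `if` roughly becomes:
+ * {{{
+ * { "schemeIf": { ... } }
+ * }}}
+ * where the nested fields come from the derived codecs in [[SaveSchemeSubExpressions]].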
+ */ +trait SaveSchemeExpressions extends SaveActualExprs[SchemeExp] with SaveSchemeSubExpressions with SavePosition[SchemeExp]: + override protected given actualExpressionEncoder: MapEncoder[SchemeExp] with + override def write(writer: Writer, exp: SchemeExp): Writer = + writer.start() + exp match + case funcall: SchemeFuncall => writer.writeMember("funcall", funcall) + case variable: SchemeVar => writer.writeMember("var", variable) + case lambda: SchemeLambda => writer.writeMember("lambda", lambda) + case argLambda: SchemeVarArgLambda => writer.writeMember("argLambda", argLambda) + case value: SchemeValue => writer.writeMember("value", value) + case letrec: SchemeLetrec => writer.writeMember("letrec", letrec) + case assert: SchemeAssert => writer.writeMember("assert", assert) + case let: SchemeLet => writer.writeMember("let", let) + case schemeIf: SchemeIf => writer.writeMember("schemeIf", schemeIf) + case set: SchemeSet => writer.writeMember("set", set) + case begin: SchemeBegin => writer.writeMember("begin", begin) + case letStar: SchemeLetStar => writer.writeMember("letStar", letStar) + case _ => + System.err.nn.println("The scheme expression with type `" + exp.getClass + "` could not be encoded") + writer + writer.close() + +/** + * Save Scheme subexpressions. + * + * Implementation of [[SaveExpressions]]. + */ +trait SaveSchemeSubExpressions extends SaveExpressions[SchemeExp] with SavePosition[SchemeExp]: + given Encoder[SchemeValue] = CompactMapBasedCodecs.deriveEncoder[SchemeValue] + given Encoder[maf.language.sexp.Value] = CompactMapBasedCodecs.deriveAllEncoders[maf.language.sexp.Value] + given Encoder[SchemeFuncall] = CompactMapBasedCodecs.deriveEncoder[SchemeFuncall] + given Encoder[SchemeVar] = CompactMapBasedCodecs.deriveEncoder[SchemeVar] + given Encoder[SchemeLambda] = CompactMapBasedCodecs.deriveEncoder[SchemeLambda] + given Encoder[SchemeVarArgLambda] = CompactMapBasedCodecs.deriveEncoder[SchemeVarArgLambda] + given Encoder[SchemeLambdaExp] = CompactMapBasedCodecs.deriveEncoder[SchemeLambdaExp] + given Encoder[SchemeLetrec] = CompactMapBasedCodecs.deriveEncoder[SchemeLetrec] + given Encoder[SchemeAssert] = CompactMapBasedCodecs.deriveEncoder[SchemeAssert] + given Encoder[SchemeLet] = CompactMapBasedCodecs.deriveEncoder[SchemeLet] + given Encoder[SchemeIf] = CompactMapBasedCodecs.deriveEncoder[SchemeIf] + given Encoder[SchemeSet] = CompactMapBasedCodecs.deriveEncoder[SchemeSet] + given Encoder[SchemeBegin] = CompactMapBasedCodecs.deriveEncoder[SchemeBegin] + given Encoder[SchemeLetStar] = CompactMapBasedCodecs.deriveEncoder[SchemeLetStar] diff --git a/code/shared/src/main/scala/maf/persistence/save/Store.scala b/code/shared/src/main/scala/maf/persistence/save/Store.scala new file mode 100644 index 000000000..95d3589e9 --- /dev/null +++ b/code/shared/src/main/scala/maf/persistence/save/Store.scala @@ -0,0 +1,177 @@ +package maf.save + +import maf.core.Expression +import maf.modular.GlobalStore +import io.bullet.borer.Encoder +import maf.modular.AbstractDomain +import maf.language.scheme.SchemeExp +import maf.modular.scheme.ModularSchemeDomain +import maf.lattice.HMap +import io.bullet.borer.Writer +import maf.lattice.HMapKey +import maf.language.scheme.lattices.ModularSchemeLattice +import scala.reflect.ClassTag +import maf.lattice.AbstractWrapType +import maf.lattice.AbstractType +import maf.lattice.AbstractSetType +import io.bullet.borer.LowPrioEncoders +import maf.core.Address +import maf.core.Environment +import maf.lattice.{ConcreteLattice, ConstantPropagation} +import 
maf.lattice.Concrete +import maf.save.save.SaveExpressions +import maf.modular.scheme.ModularSchemeLatticeWrapper +import maf.modular.scheme.SchemeConstantPropagationDomain +import maf.lattice.ConstantPropagation.L + +/** + * Base trait for encoding [[AbstractDomain.Value values]]. + * + * @note + * This trait gives the methods needed to encode values, but not the implementation. Other traits like [[SaveModularDomain]] should be mixed in. The + * exact trait that is mixed in depends on the values that you are using in your analysis. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait SaveValue[Expr <: Expression] extends Save[Expr] with AbstractDomain[Expr]: + /** Encodes a value */ + given valueEncoder: Encoder[Value] + +/** + * Trait to encode lattices. + * + * @tparam Expr + * The type of expression used in the analysis + */ +trait SaveLattice[Expr <: Expression] extends Save[Expr]: + /** + * The types of lattices that can be encoded by this trait. + * + * This is used to specify the givens, if this was not used, this given could be used for every class with a single abstract type. + */ + type Lattice[T] = ConstantPropagation.L[T] | Concrete.L[T] + given latticeEncoder[P[T] <: Lattice[T], T: Encoder]: MapEncoder[P[T]] with + override def write(writer: Writer, lattice: P[T]): Writer = + writer.start() + lattice match + case constant: ConstantPropagation.L[T] => + writer.writeMember("constant", constant)(using constantLatticeEncoder) + case _ => System.err.nn.println("The lattice of type `" + lattice.getClass + "` could not be encoded") + writer.close() + + /** Encodes [[ConstantPropagation.L constant lattices]]. */ + given constantLatticeEncoder[T: Encoder]: Encoder[ConstantPropagation.L[T]] with + override def write(writer: Writer, lattice: ConstantPropagation.L[T]): Writer = + lattice match + case ConstantPropagation.Top => writer.write("top") + case ConstantPropagation.Constant(a) => writer.write[T](a) + case ConstantPropagation.Bottom => writer.write("bottom") + writer + +trait SaveModularSchemeDomainLattices extends Save[SchemeExp] with ModularSchemeDomain: + given stringLatticeEncoder: Encoder[S] + given booleanLatticeEncoder: Encoder[B] + given integerLatticeEncoder: Encoder[I] + given realLatticeEncoder: Encoder[R] + given charLatticeEncoder: Encoder[C] + given symbolLatticeEncoder: Encoder[Sym] + +trait SaveSchemeConstantPropagationDomain extends SchemeConstantPropagationDomain with SaveModularSchemeDomainLattices with SaveLattice[SchemeExp]: + override given stringLatticeEncoder: Encoder[S] = latticeEncoder[L, String].asInstanceOf[Encoder[S]] + override given booleanLatticeEncoder: Encoder[B] = latticeEncoder[L, Boolean].asInstanceOf[Encoder[B]] + override given integerLatticeEncoder: Encoder[I] = latticeEncoder[L, BigInt].asInstanceOf[Encoder[I]] + override given realLatticeEncoder: Encoder[R] = latticeEncoder[L, Double].asInstanceOf[Encoder[R]] + override given charLatticeEncoder: Encoder[C] = latticeEncoder[L, Char].asInstanceOf[Encoder[C]] + override given symbolLatticeEncoder: Encoder[Sym] = latticeEncoder[L, String].asInstanceOf[Encoder[Sym]] + +/** + * Trait for encoding values as [[ModularSchemeLattice modular scheme lattices]], as defined in [[ModularSchemeDomain]]. + * + * Implementation of [[SaveValue]]. 
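+ *
+ * A store value is an [[HMap]] and is written as an array with one tagged entry per lattice component that is present. Under the constant
+ * propagation domain, the abstract value for the integer `5` would roughly look like:
+ * {{{
+ * [ { "int": { "constant": 5 } } ]
+ * }}}
+ * (the exact nesting depends on the lattice encoders that are mixed in).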
+ */ +trait SaveModularSchemeDomain + extends SaveValue[SchemeExp] + with ModularSchemeDomain + with SaveAddr[SchemeExp] + with SaveExpressions[SchemeExp] + with SaveEnvironment[SchemeExp] + with SaveComponents[SchemeExp] + with SaveModularSchemeDomainLattices: + /** Generic modular scheme lattice that is used for typechecking of nested class inside of this. */ + type SaveSchemeLattice = ModularSchemeLattice[?, S, B, I, R, C, Sym] + + override given valueEncoder: ArrayEncoder[HMap] with + override def write(writer: Writer, hmap: HMap): Writer = + writer.start() + hmap.contents.foreach((key, value) => writer.writeMember((key, value))) + writer.close() + + given MapEncoder[SaveSchemeLattice#Clo]() with + override def write(writer: Writer, closure: SaveSchemeLattice#Clo): Writer = + writer.writeArrayStart() + closure.closures.foreach((clo) => + writer.start() + writer.writeMember("expression", clo._1.asInstanceOf[SchemeExp]) + writer.writeMember("address", clo._2.asInstanceOf[Environment[Address]]) + writer.close() + ) + writer.writeBreak() + + given ArrayEncoder[SaveSchemeLattice#Pointer]() with + override def write(writer: Writer, pointer: SaveSchemeLattice#Pointer): Writer = + writer.start() + pointer.ptrs.foreach(writer.write(_)) + writer.close() + + given MapEncoder[SaveSchemeLattice#Cons]() with + override def write(writer: Writer, cons: SaveSchemeLattice#Cons): Writer = + writer.start() + writer.writeMember("car", cons.car) + writer.writeMember("cdr", cons.cdr) + writer.close() + + given MapEncoder[SaveSchemeLattice#Vec]() with SaveMapToArray with + override def write(writer: Writer, vec: SaveSchemeLattice#Vec): Writer = + writer.start() + writer.writeMember("size", vec.size) + writer.writeMember("elements", vec.elements) + writer.close() + + given MapEncoder[(HMapKey, Any)] with + override def write(writer: Writer, hMapPair: (HMapKey, Any)): Writer = + writer.start() + val (key, value) = hMapPair + + value match { + case int: SaveSchemeLattice#Int => writer.writeMember("int", int.i) + case bool: SaveSchemeLattice#Bool => writer.writeMember("boolean", bool.b) + case str: SaveSchemeLattice#Str => writer.writeMember("string", str.s) + case char: SaveSchemeLattice#Char => writer.writeMember("char", char.c) + case inputPort: SaveSchemeLattice#InputPort => writer.writeMember("inputPort", inputPort.id) + case real: SaveSchemeLattice#Real => writer.writeMember("real", real.r) + case symbol: SaveSchemeLattice#Symbol => writer.writeMember("symbol", symbol.s) + case prim: SaveSchemeLattice#Prim => writer.writeMember("primitive", prim.prims) + case clo: SaveSchemeLattice#Clo => writer.writeMember("closure", clo) + case pointer: SaveSchemeLattice#Pointer => writer.writeMember("pointer", pointer) + case cons: SaveSchemeLattice#Cons => writer.writeMember("cons", cons) + case vec: SaveSchemeLattice#Vec => writer.writeMember("vector", vec) + case kont: SaveSchemeLattice#Kont => writer.writeMember("kont", kont.k.asInstanceOf[Set[Component]]) + case modularLattice.Nil => writer.writeMember("nil", "") + case modularLattice.Void => writer.writeMember("void", "") + case _ => + System.err.nn.println("The lattice with type `" + key.getClass + "` could not be encoded") + writer.writeMember("ERROR", "Unknown type: " + key.getClass.toString()) + } + writer.close() + +/** + * Trait to encode the global store. 
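+ *
+ * Because [[SaveMapToArray]] is mixed in, the persisted output gains a top-level `"store"` entry holding an array of alternating addresses and
+ * values, roughly:
+ * {{{
+ * "store": [ <address1>, <value1>, <address2>, <value2>, ... ]
+ * }}}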
+ * + * This adds the global store to the objects that should be saved, but does not have an implementation that can be used to encode the + * [[AbstractDomain.Value values]] inside of the store, for this an implementation of [[SaveValue]] like [[SaveModularDomain]] should be included + * depending on the values that are used in your analysis. + */ +trait SaveGlobalStore[Expr <: Expression] extends SaveValue[Expr] with SaveAddr[Expr] with SaveMapToArray with GlobalStore[Expr]: + override def saveInfo: List[(String, Savable[_])] = + return super.saveInfo ++ List(("store" -> Savable(store))) diff --git a/code/shared/src/main/scala/maf/persistence/save/Util.scala b/code/shared/src/main/scala/maf/persistence/save/Util.scala new file mode 100644 index 000000000..adc2b1e52 --- /dev/null +++ b/code/shared/src/main/scala/maf/persistence/save/Util.scala @@ -0,0 +1,41 @@ +package maf.save + +import io.bullet.borer.Encoder +import io.bullet.borer.Writer + +/** + * Trait to encode a map using an array. + * + * This will save your map in an array with alternating keys and values: + * {{{ + * [ + * < key1 >, + * < value1 >, + * < key2 >, + * < value2 >, + * ... + * ] + * }}} + * This can, for example be used if the key is not a string, and can therefore not be used as a key of a JSON map. + */ +trait SaveMapToArray: + /** + * Encodes a map using an array. + * + * This will save your map in an array with alternating keys and values: + * {{{ + * [ + * < key1 >, + * < value1 >, + * < key2 >, + * < value2 >, + * ... + * ] + * }}} + * This can, for example be used if the key is not a string, and can therefore not be used as a key of a JSON map. + */ + given mapKeyEncoder[K, V](using keyEncoder: Encoder[K], valueEncoder: Encoder[V]): ArrayKeyEncoder[Map[K, V]] with + override def write(writer: Writer, map: Map[K, V]): Writer = + writer.start() + for (key, value) <- map do writer.writeMember(key, value)(using keyEncoder, valueEncoder) + writer.close() diff --git a/code/shared/src/main/scala/maf/util/ColouredFormatting.scala b/code/shared/src/main/scala/maf/util/ColouredFormatting.scala index 8dd2577cd..983c381a8 100644 --- a/code/shared/src/main/scala/maf/util/ColouredFormatting.scala +++ b/code/shared/src/main/scala/maf/util/ColouredFormatting.scala @@ -1,5 +1,5 @@ package maf.util -import io.AnsiColor.* +import scala.io.AnsiColor.* object ColouredFormatting { // Wrapper for easily accessing https://www.scala-lang.org/api/2.13.5/scala/io/AnsiColor.html diff --git a/code/shared/src/test/scala/maf/test/persistence/Analysis.scala b/code/shared/src/test/scala/maf/test/persistence/Analysis.scala new file mode 100644 index 000000000..ae95de290 --- /dev/null +++ b/code/shared/src/test/scala/maf/test/persistence/Analysis.scala @@ -0,0 +1,715 @@ +package maf.test.persistence + +import io.bullet.borer +import io.bullet.borer.Decoder +import io.bullet.borer.Encoder +import io.bullet.borer.Json +import io.bullet.borer.Writer +import maf.core.Address +import maf.core.Expression +import maf.core.Identifier +import maf.core.NoCodeIdentityDebug +import maf.core.Position +import maf.language.CScheme.CSchemeParser +import maf.language.scheme.SchemeBegin +import maf.language.scheme.SchemeExp +import maf.language.scheme.SchemeLambdaExp +import maf.language.scheme.SchemeVar +import maf.modular.AnalysisEntry +import maf.modular.ModAnalysis +import maf.modular.scheme.SchemeConstantPropagationDomain +import maf.modular.scheme.modf.SchemeModFNoSensitivity +import maf.modular.scheme.modf.SimpleSchemeModFAnalysis +import 
maf.modular.worklist.FIFOWorklistAlgorithm +import maf.save.AbstractDecoder +import maf.save.AbstractEncoder +import maf.save.ArrayDecoder +import maf.save.ArrayEncoder +import maf.save.ArrayKeyEncoder +import maf.save.Load +import maf.save.LoadActualComponents +import maf.save.LoadActualExpressions +import maf.save.LoadComponents +import maf.save.LoadExpressions +import maf.save.LoadSchemeExpressions +import maf.save.LoadStandardSchemeComponents +import maf.save.MapDecoder +import maf.save.MapEncoder +import maf.save.Save +import maf.save.SaveActualComponents +import maf.save.SaveComponents +import maf.save.SaveStandardSchemeComponents +import maf.save.save.SaveActualExpressions +import maf.save.save.SaveExpressions +import maf.save.save.SaveSchemeExpressions +import maf.util.Reader +import maf.util.benchmarks.Timeout.T +import org.scalacheck.Gen +import org.scalacheck.Prop._ +import org.scalatest.GivenWhenThen +import org.scalatest.matchers.should.Matchers +import org.scalatest.prop.TableDrivenPropertyChecks +import org.scalatest.propspec.AnyPropSpec +import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks + +import java.nio.file.Files +import java.nio.file.Path +import java.nio.file.Paths +import scala.jdk.CollectionConverters.* +import maf.save.SaveValue +import maf.save.LoadValue +import maf.core.Lattice +import maf.core.Identity +import maf.core.SimpleIdentity +import maf.save.SaveContext +import maf.save.LoadContext +import maf.save.SaveAddr +import maf.save.LoadAddr +import maf.modular.scheme.PrmAddr +import maf.core.Environment +import maf.language.scheme.SchemeLambdaExp +import maf.core.Address +import maf.core.Position +import maf.modular.scheme.PtrAddr +import maf.core.Identifier +import maf.modular.scheme.VarAddr +import maf.modular.scheme.modf.BaseEvalM +import maf.modular.scheme.modf.SchemeModFComponent +import maf.modular.scheme.modf.BaseSchemeModFSemanticsM +import maf.core.BasicEnvironment +import maf.language.scheme.SchemeLambda +import maf.save.SaveModF +import maf.save.LoadModF +import maf.save.SaveEnvironment +import maf.save.LoadEnvironment +import maf.modular.GlobalStore +import maf.save.SaveInitialized +import maf.save.SaveWorklist +import maf.save.SaveGlobalStore +import maf.save.SaveDependency +import maf.save.SaveAddrDep +import maf.save.SaveModularSchemeDomain +import maf.save.SaveSchemeConstantPropagationDomain +import maf.save.SaveSchemeAddr +import maf.save.SaveNoContext +import maf.save.SaveSequentialWorklist +import maf.save.LoadInitialized +import maf.save.LoadWorklist +import maf.save.LoadGlobalStore +import maf.save.LoadDependency +import maf.save.LoadAddrDependency +import maf.save.LoadModularSchemeDomain +import maf.save.LoadSchemeConstantPropagationDomain +import maf.save.LoadSchemeAddr +import maf.save.LoadFIFOWorklist +import maf.save.LoadNoContext +import maf.save.SaveStandardSchemeComponentPosition +import maf.save.LoadStandardSchemeComponentPosition +import maf.save.SaveComponentIntID +import maf.save.LoadComponentIntID +import maf.save.save.SaveRecursiveSchemeExpressionsIntID +import maf.save.LoadExpressionIntID +import maf.test.persistence.PersistenceSpecAnalysises.SimpleModFActual +import maf.test.persistence.PersistenceSpecAnalysises.SimpleModFPositionComponents +import maf.test.persistence.PersistenceSpecAnalysises.SimpleModFIDComponents +import maf.test.persistence.PersistenceSpecAnalysises.SimpleModFIDExpressions +import maf.test.persistence.PersistenceSpecAnalysises.SimpleModFIDs +import org.scalatest.compatible.Assertion +import 
maf.save.save.SaveExpressionIntID +import maf.test.persistence.PersistenceSpecAnalysises.SimpleModFIDSchemeExpressions +import maf.test.persistence.PersistenceSpecAnalysises.SimpleModFActualCbor +import maf.test.persistence.PersistenceSpecAnalysises.SimpleModFPositionComponentsCbor +import maf.test.persistence.PersistenceSpecAnalysises.SimpleModFIDComponentsCbor +import maf.test.persistence.PersistenceSpecAnalysises.SimpleModFIDExpressionsCbor +import maf.test.persistence.PersistenceSpecAnalysises.SimpleModFIDSchemeExpressionsCbor +import maf.test.persistence.PersistenceSpecAnalysises.SimpleModFIDsCbor +import maf.save.SaveCbor +import maf.save.LoadCbor +import maf.save.save.SaveMainSchemeBody +import maf.save.LoadMainSchemeBody + +trait Generator: + protected case class StringValue(str: String) + protected val optionStr = Gen.option(Gen.alphaLowerStr) + protected val str = Gen.alphaLowerStr + val stringAddr: Gen[Address] = str.map(str => new PrmAddr(str)) + val stringValues = functionGen(stringValue) + val stringContexts = functionGen(stringValue) + val stringComponents = functionGen(stringValue) + val stringEnvironments = stringAddr.map(addr => new BasicEnvironment[Address](Map((str.sample.get, addr)))) + + def stringValue(): StringValue = return new StringValue(str.sample.get) + + def functionGen[T](f: () => T): Gen[T] = Gen.const("").map(_ => f()) + protected val maxDepth = 5 + protected var depth = 0 + def maxDepthFunctionGen[T](generate: () => T, retry: () => T): Gen[T] = + Gen.const("") + .map(_ => + if depth < maxDepth then + depth += 1 + val res = generate() + depth -= 1 + res + else retry() + ) + +object PersistenceSpec: + def simpleIdentity(tag: String): Identity = new SimpleIdentity(Position.Position(-2, 0, new Position.SimplePTag(tag))) + def simpleExpression(tag: String): Expression = Identifier("", new SimpleIdentity(Position.Position(-2, 0, new Position.SimplePTag(tag)))) + def simpleSchemeExpression(tag: String): SchemeExp = SchemeVar( + Identifier("", simpleIdentity(tag)) + ) + def simpleSchemeLambdaExpression(tag: String): SchemeLambdaExp = + new SchemeLambda(None, List(), List(simpleSchemeExpression(tag)), None, simpleIdentity(tag)) + +trait PersistenceSpec extends AnyPropSpec with ScalaCheckPropertyChecks with TableDrivenPropertyChecks with GivenWhenThen with Matchers with Generator: + + /** + * Class used for getting access to the encoding/decoding givens. + * + * @note + * This analysis shouldn't be used to run actual analyses since the run method is a noop. + */ + class TestAnalysis extends ModAnalysis[Expression](PersistenceSpec.simpleExpression(str.sample.get)) with Save[Expression] with Load[Expression]: + override def finished: Boolean = return true + override def intraAnalysis(component: Component): IntraAnalysis = ??? + override def addToWorkList(cmp: Component): Unit = ??? + override def initialComponent: Component = ??? + override def expr(cmp: Component): Expression = ??? + override protected def run(timeout: T): Unit = return + + /** + * Class used for getting access to the encoding/decoding givens using scheme expressions. + * + * @note + * This analysis shouldn't be used to run actual analyses since the run method is a noop. + */ + class TestSchemeAnalysis + extends ModAnalysis[SchemeExp](PersistenceSpec.simpleSchemeExpression(str.sample.get)) + with Save[SchemeExp] + with Load[SchemeExp]: + override def finished: Boolean = return true + override def intraAnalysis(component: Component): IntraAnalysis = ??? 
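+        // The remaining ModAnalysis members are intentionally stubbed with ???: these test analyses only exercise the encoding/decoding givens,
+        // and since `run` is a no-op, these hooks are not expected to be called.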
+ override def addToWorkList(cmp: Component): Unit = ??? + override def initialComponent: Component = ??? + override def expr(cmp: Component): SchemeExp = ??? + override protected def run(timeout: T): Unit = return + + abstract class TestBaseSchemeModFSemanticsAnalysis extends TestSchemeAnalysis with BaseSchemeModFSemanticsM: + override def expr(cmp: Component): SchemeExp = ??? + override def intraAnalysis(cmp: Component): SchemeModFSemanticsIntra = ??? + override def allocCtx( + clo: (SchemeLambdaExp, Environment[Address]), + args: List[Value], + call: Position.Position, + caller: Component + ): ComponentContext = + ??? + override def allocPtr(exp: SchemeExp, cmp: Component): PtrAddr[AllocationContext] = ??? + override def allocVar(id: Identifier, cmp: Component): VarAddr[AllocationContext] = ??? + override def baseEnv: Env = ??? + implicit override lazy val baseEvalM: BaseEvalM[M] = ??? + override def newComponent(call: SchemeModFComponent.Call[ComponentContext]): Component = ??? + override def view(cmp: Component): SchemeModFComponent = ??? + // Members declared in maf.modular.GlobalStore + override def store: Map[Addr, Value] = ??? + override def store_=(store: Map[Addr, Value]): Unit = ??? + + trait SaveStringContext[Expr <: Expression] extends SaveContext[Expr]: + override type EncodeContext = StringValue + override given contextEncoder: Encoder[EncodeContext] with + override def write(writer: Writer, context: EncodeContext): Writer = + writer.writeMapOpen(1) + writer.write("==TESTING== NO CONTEXT") + writer.write(context.str) + writer.writeMapClose() + + trait LoadStringContext[Expr <: Expression] extends LoadContext[Expr]: + override type DecodeContext = StringValue + override given contextDecoder: Decoder[DecodeContext] with + override def read(reader: borer.Reader): DecodeContext = + reader.readMapOpen(1) + if !reader.tryReadString("==TESTING== NO CONTEXT") then return reader.unexpectedDataItem("==TESTING== NO CONTEXT") + val str = reader.readString() + reader.readBreak() + return StringValue(str) + + trait SaveStringValue[Expr <: Expression] extends SaveValue[Expr]: + override type Value = StringValue + implicit override lazy val lattice: Lattice[Value] = ??? + override given valueEncoder: Encoder[Value] with + override def write(writer: Writer, value: Value): Writer = + writer.writeMapOpen(1) + writer.write("==TESTING== NO VALUE") + writer.write(value.str) + writer.writeMapClose() + + trait LoadStringValue[Expr <: Expression] extends LoadValue[Expr]: + override type Value = StringValue + implicit override lazy val lattice: Lattice[Value] = ??? 
+ override given valueDecoder: Decoder[Value] with + override def read(reader: borer.Reader): Value = + reader.readMapOpen(1) + if !reader.tryReadString("==TESTING== NO VALUE") then return reader.unexpectedDataItem("==TESTING== NO VALUE") + val str = reader.readString() + reader.readBreak() + return StringValue(str) + + trait SaveStringComponent[Expr <: Expression] extends SaveComponents[Expr]: + override type Component = StringValue + override given componentEncoder: Encoder[Component] with + override def write(writer: Writer, component: Component): Writer = + writer.writeMapOpen(1) + writer.write("==TESTING== NO COMPONENT") + writer.write(component.str) + writer.writeMapClose() + + trait LoadStringComponent[Expr <: Expression] extends LoadComponents[Expr]: + override type Component = StringValue + override given componentDecoder: Decoder[Component] with + override def read(reader: borer.Reader): Component = + reader.readMapOpen(1) + if !reader.tryReadString("==TESTING== NO COMPONENT") then return reader.unexpectedDataItem("==TESTING== NO COMPONENT") + val str = reader.readString() + reader.readBreak() + return StringValue(str) + + trait SaveStringExpression[Expr <: Expression] extends SaveExpressions[Expr]: + override given expressionEncoder: Encoder[Expr] with + override def write(writer: Writer, expression: Expr): Writer = + writer.writeMapOpen(1) + if expression.isInstanceOf[SchemeLambdaExp] then writer.write("==TESTING== NO LAMBDA EXPRESSION") + else writer.write("==TESTING== NO EXPRESSION") + writer.write(expression.idn.pos.tag.show) + writer.writeMapClose() + + trait LoadStringExpression[Expr <: Expression] extends LoadExpressions[Expr]: + def simpleExpression(name: String): Expr + def simpleLambdaExpression(name: String): Expr + override given expressionDecoder: Decoder[Expr] with + override def read(reader: borer.Reader): Expr = + reader.readMapOpen(1) + val lambdaExp = reader.tryReadString("==TESTING== NO LAMBDA EXPRESSION") + if !lambdaExp && !reader.tryReadString("==TESTING== NO EXPRESSION") then return reader.unexpectedDataItem("==TESTING== NO EXPRESSION") + val str = reader.readString() + reader.readBreak() + return if lambdaExp then simpleLambdaExpression(str) else simpleExpression(str) + + trait SaveStringAddr[Expr <: Expression] extends SaveAddr[Expr]: + override given addressEncoder: Encoder[Address] with + override def write(writer: Writer, addr: Address): Writer = + writer.writeMapOpen(1) + writer.write("==TESTING== NO ADDRESS") + writer.write(addr.asInstanceOf[PrmAddr].nam) + writer.writeBreak() + + trait LoadStringAddr[Expr <: Expression] extends LoadAddr[Expr]: + override def addressDecoders: Set[(String, Decoder[? <: Address])] = + Set(("==TESTING== NO ADDRESS", summon[Decoder[PrmAddr]])) + given Decoder[PrmAddr] with + override def read(reader: borer.Reader): PrmAddr = + val str = reader.readString() + return new PrmAddr(str) + + trait LoadStringSchemeExpression extends LoadStringExpression[SchemeExp]: + override def simpleExpression(tag: String): SchemeExp = PersistenceSpec.simpleSchemeExpression(tag) + override def simpleLambdaExpression(tag: String): SchemeExp = PersistenceSpec.simpleSchemeLambdaExpression(tag) + + trait SaveStringEnvironment[Expr <: Expression] extends SaveEnvironment[Expr] with SaveStringAddr[Expr] + trait LoadStringEnvironment[Expr <: Expression] extends LoadEnvironment[Expr] with LoadStringAddr[Expr] + + /** + * Test whether an object can be encoded/decoded and whether or not the decoded value is equal to the original object. 
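+ *
+ * A typical call (as used in [[PersistContextSpec]]) looks like:
+ * {{{
+ * testEncodingDecoding(
+ *   "no context",
+ *   NoContext,
+ *   () =>
+ *     val analysis = new NoContextAnalysis
+ *     import analysis.given
+ *     (summon[Encoder[NoContext.type]], summon[Decoder[NoContext.type]]),
+ *   (original: NoContext.type, decoded: NoContext.type) => original should equal(decoded)
+ * )
+ * }}}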
+ * + * @param name + * The name of the object + * @param object + * The object to test + * @param codec + * The encoder and decoder for these objects + */ + def testEncodingDecoding[T, ASSERTION]( + name: String, + obj: T, + codec: () => (Encoder[T], Decoder[T]), + eq: (original: T, decoded: T) => ASSERTION + ): Unit = + property(s"A ${name} should be encoded") { + val encoded = Json.encode(obj)(using codec()._1).toByteArray + encoded.length should be > (0) + } + + property(s"A ${name} should be decoded") { + val encoded = Json.encode(obj)(using codec()._1).toByteArray + val decoded = Json.decode(encoded).to[T](using codec()._2) + decoded.valueTry.isSuccess should be(true) + } + + property(s"A ${name} should remain the same when encoding and decoding") { + val encoded = Json.encode(obj)(using codec()._1).toByteArray + val decoded = Json.decode(encoded).to[T](using codec()._2) + decoded.valueTry.isSuccess should be(true) + eq(obj, decoded.value) + } + + /** + * Test whether a type can be encoded/decoded and whether or not the decoded value is equal to the original object. + * + * @param name + * The name of the object + * @param objects + * A generator to create new objects + * @param codec + * The encoder and decoder for these objects + */ + def testEncodingDecoding[T, ASSERTION]( + name: String, + objects: Gen[T], + codec: () => (Encoder[T], Decoder[T]), + eq: (original: T, decoded: T) => ASSERTION, + print: Boolean = true + ): Unit = + property(s"A ${name} should be encoded") { + forAll(objects) { (obj: T) => + if print then Given(name + ": " + obj.toString()) + val encoded = Json.encode(obj)(using codec()._1).toByteArray + encoded.length should be > (0) + } + } + + property(s"A ${name} should be decoded") { + forAll(objects) { (obj: T) => + if print then Given(name + ": " + obj.toString()) + val encoded = Json.encode(obj)(using codec()._1).toByteArray + val decoded = Json.decode(encoded).to[T](using codec()._2) + decoded.valueTry.isSuccess should be(true) + } + } + + property(s"A ${name} should remain the same when encoding and decoding") { + forAll(objects) { (obj: T) => + if print then Given(name + ": " + obj.toString()) + val encoded = Json.encode(obj)(using codec()._1).toByteArray + val decoded = Json.decode(encoded).to[T](using codec()._2) + eq(obj, decoded.value) + } + } + +class PersistAnalysisSpec extends PersistenceSpec: + val programs = Gen.oneOf[Path](getFilesList("test/R5RS/ad").appendedAll(getFilesList("test/R5RS/various"))) + + def getFilesList(folder: String): List[Path] = + val programsStream = Files.list(Paths.get(folder)) + return if programsStream == null then List() else programsStream.iterator().nn.asScala.toList + + class ContextInsensitiveSchemeAnalysis(program: SchemeExp) + extends SaveAnalysis(program) + with SchemeModFNoSensitivity + with SchemeConstantPropagationDomain + with FIFOWorklistAlgorithm[SchemeExp] + with SaveModF + with LoadModF + + abstract class SaveAnalysis(program: SchemeExp) + extends SimpleSchemeModFAnalysis(program) + with AnalysisEntry[SchemeExp] + with Save[SchemeExp] + with Load[SchemeExp] + + private def save[Analysis <: AnalysisEntry[SchemeExp]](program: SchemeExp, analysis: Analysis): Path = + import analysis.given + analysis.analyze() + val saveFile = Files.createTempFile("maf", ".json") + require(saveFile != null) + analysis.save(saveFile.toString()) + return saveFile.asInstanceOf[Path] + + private def load[Analysis <: AnalysisEntry[_]](analysis: Analysis, saveFile: Path): Analysis = + import analysis.given + 
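+        // `load` is expected to restore the previously saved analysis state (components, store, dependencies, ...) into this fresh instance,
+        // which the properties below then compare against the original analysis.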
analysis.load(saveFile.toString()) + return analysis + + private def testSchemePrograms[ASSERTION, Analysis <: ModAnalysis[SchemeExp]]( + anl: (program: SchemeExp) => Analysis, + testCorrectness: (result: Analysis, loadedResult: Analysis) => ASSERTION, + runTest: ((Path) => Unit) => Unit, + limitHeight: Int + ): Unit = + runTest((path: Path) => + val program = CSchemeParser.parseProgram(Reader.loadFile(path.toString())) + whenever(program.height < limitHeight) { + Given(path.toString()) + val analysis = anl(program) + val saveFile = save(program, analysis) + + val loadAnalysis = load(anl(program), saveFile) + + Files.deleteIfExists(saveFile) + testCorrectness(analysis, loadAnalysis) + } + ) + + private def testAnalysis[ASSERTION, Analysis <: GlobalStore[SchemeExp]]( + name: String, + anl: (program: SchemeExp) => GlobalStore[SchemeExp] & BaseSchemeModFSemanticsM, + runTest: ((Path) => Unit) => Unit = (test: (Path) => Unit) => forAll(programs) { (path: Path) => test(path) }, + limitHeight: Int = Int.MaxValue + ) = + property(s"A programs result should remain the same when encoded and decoded for a $name") { + testSchemePrograms( + anl, + (result: GlobalStore[SchemeExp], loadedResult: GlobalStore[SchemeExp]) => + loadedResult.result shouldBe defined + loadedResult.result.get should equal(result.result.get), + runTest, + limitHeight + ) + } + + property(s"A programs components should remain the same when encoded and decoded for a $name") { + testSchemePrograms( + anl, + (result: GlobalStore[SchemeExp], loadedResult: GlobalStore[SchemeExp]) => + loadedResult.visited.size should equal(result.visited.size) + // This is done inside of a loop to improve the errors given when a test fails + for component <- result.visited do loadedResult.visited should contain(component), + runTest, + limitHeight + ) + } + + property(s"A programs dependencies should remain the same when encoded and decoded for a $name") { + testSchemePrograms( + anl, + (result: GlobalStore[SchemeExp], loadedResult: GlobalStore[SchemeExp]) => + loadedResult.deps.size should equal(result.deps.size) + // This is done inside of a loop to improve the errors given when a test fails + for dependency <- result.deps.keysIterator do + loadedResult.deps.keySet should contain(dependency) + loadedResult.deps.get(dependency) should equal(result.deps.get(dependency)) + , + runTest, + limitHeight + ) + } + + property(s"A programs store should remain the same when encoded and decoded for a $name") { + testSchemePrograms( + anl, + (result: GlobalStore[SchemeExp], loadedResult: GlobalStore[SchemeExp]) => + loadedResult.store.size should equal(result.store.size) + // This is done inside of a loop to improve the errors given when a test fails + for addr <- result.store.keySet do + loadedResult.store.keySet should contain(addr) + loadedResult.store.get(addr) should equal(result.store.get(addr)) + , + runTest, + limitHeight + ) + } + + testAnalysis("context insensitive modf analysis saving the actual expressions and components", + (program: SchemeExp) => new SimpleModFActual(program), + limitHeight = 20 + ) + + testAnalysis( + "context insensitive modf analysis saving the components as their positions and actual expressions", + (program: SchemeExp) => new SimpleModFPositionComponents(program), + limitHeight = 20 + ) + + testAnalysis( + "context insensitive modf analysis saving the components as integer IDs and actual expressions", + (program: SchemeExp) => new SimpleModFIDComponents(program), + limitHeight = 20 + ) + + testAnalysis( + "context insensitive 
modf analysis saving the actual components and expressions as integer IDs", + (program: SchemeExp) => new SimpleModFIDExpressions(program), + limitHeight = 20 + ) + + var maxADTHeight = 0 + testAnalysis( + s"context insensitive modf analysis saving the expressions as integer IDs recursively and saving the actual components", + (program: SchemeExp) => new SimpleModFIDSchemeExpressions(program, maxADTHeight), + (test: (Path) => Unit) => + forAll(programs, Gen.chooseNum(0, 15))({ (path: Path, maxADTHeight: Int) => + this.maxADTHeight = maxADTHeight + Given(s"max ADT height: $maxADTHeight") + test(path) + }) + ) + + testAnalysis( + s"context insensitive modf analysis saving the expressions recursively as integer IDs and components as integer IDs", + (program: SchemeExp) => new SimpleModFIDs(program, maxADTHeight), + (test: (Path) => Unit) => + forAll(programs, Gen.chooseNum(0, 15))({ (path: Path, maxADTHeight: Int) => + this.maxADTHeight = maxADTHeight + Given(s"max ADT height: $maxADTHeight") + test(path) + }) + ) + + testAnalysis( + "context insensitive modf analysis saving the actual expressions and components in CBOR", + (program: SchemeExp) => new SimpleModFActualCbor(program), + limitHeight = 500 + ) + + testAnalysis( + "context insensitive modf analysis saving the components as their positions and actual expressions in CBOR", + (program: SchemeExp) => new SimpleModFPositionComponentsCbor(program), + limitHeight = 500 + ) + + testAnalysis( + "context insensitive modf analysis saving the components as integer IDs and actual expressions in CBOR", + (program: SchemeExp) => new SimpleModFIDComponentsCbor(program), + limitHeight = 500 + ) + + testAnalysis( + "context insensitive modf analysis saving the actual components and expressions as integer IDs in CBOR", + (program: SchemeExp) => new SimpleModFIDExpressionsCbor(program), + limitHeight = 500 + ) + + testAnalysis( + s"context insensitive modf analysis saving the expressions as integer IDs recursively and saving the actual components in CBOR", + (program: SchemeExp) => new SimpleModFIDSchemeExpressionsCbor(program, maxADTHeight), + (test: (Path) => Unit) => + forAll(programs, Gen.chooseNum(0, 100))({ (path: Path, maxADTHeight: Int) => + this.maxADTHeight = maxADTHeight + Given(s"max ADT height: $maxADTHeight") + test(path) + }) + ) + + testAnalysis( + s"context insensitive modf analysis saving the expressions recursively as integer IDs and components as integer IDs in CBOR", + (program: SchemeExp) => new SimpleModFIDsCbor(program, maxADTHeight), + (test: (Path) => Unit) => + forAll(programs, Gen.chooseNum(0, 100))({ (path: Path, maxADTHeight: Int) => + this.maxADTHeight = maxADTHeight + Given(s"max ADT height: $maxADTHeight") + test(path) + }) + ) + +object PersistenceSpecAnalysises: + trait SaveSpec[Expr <: Expression] + extends Save[Expr] + with SaveInitialized[Expr] + with SaveComponents[Expr] + with SaveWorklist[Expr] + with SaveGlobalStore[Expr] + with SaveDependency[Expr] + with SaveAddrDep[Expr] + + trait SaveModF + extends SaveSpec[SchemeExp] + with SaveStandardSchemeComponents + with SaveModularSchemeDomain + with SaveSchemeConstantPropagationDomain + with SaveSchemeAddr + with SaveSchemeExpressions + with SaveNoContext[SchemeExp] + with SaveSequentialWorklist[SchemeExp] + with SaveMainSchemeBody + + trait LoadSpec[Expr <: Expression] + extends Load[Expr] + with LoadInitialized[Expr] + with LoadComponents[Expr] + with LoadWorklist[Expr] + with LoadGlobalStore[Expr] + with LoadDependency[Expr] + with LoadAddrDependency[Expr] + + 
trait LoadModF + extends LoadSpec[SchemeExp] + with LoadStandardSchemeComponents + with LoadModularSchemeDomain + with LoadSchemeConstantPropagationDomain + with LoadSchemeAddr + with LoadFIFOWorklist[SchemeExp] + with LoadSchemeExpressions + with LoadNoContext[SchemeExp] + with LoadMainSchemeBody + + trait SimpleModF + extends SimpleSchemeModFAnalysis + with SchemeModFNoSensitivity + with SchemeConstantPropagationDomain + with FIFOWorklistAlgorithm[SchemeExp] + with SaveModF + with LoadModF + + class SimpleModFActual(program: SchemeExp) + extends SimpleSchemeModFAnalysis(program) + with SaveActualExpressions[SchemeExp] + with LoadActualExpressions[SchemeExp] + with SaveActualComponents[SchemeExp] + with LoadActualComponents[SchemeExp] + with SimpleModF + + class SimpleModFPositionComponents(program: SchemeExp) + extends SimpleSchemeModFAnalysis(program) + with SaveActualExpressions[SchemeExp] + with LoadActualExpressions[SchemeExp] + with SaveStandardSchemeComponentPosition + with LoadStandardSchemeComponentPosition + with SimpleModF + + class SimpleModFIDComponents(program: SchemeExp) + extends SimpleSchemeModFAnalysis(program) + with SaveActualExpressions[SchemeExp] + with LoadActualExpressions[SchemeExp] + with SaveComponentIntID[SchemeExp] + with LoadComponentIntID[SchemeExp] + with SimpleModF + + class SimpleModFIDExpressions(program: SchemeExp) + extends SimpleSchemeModFAnalysis(program) + with SaveExpressionIntID[SchemeExp] + with LoadExpressionIntID[SchemeExp] + with SaveActualComponents[SchemeExp] + with LoadActualComponents[SchemeExp] + with SimpleModF + + class SimpleModFIDSchemeExpressions(program: SchemeExp, override val maxASTHeight: Int) + extends SimpleSchemeModFAnalysis(program) + with SaveRecursiveSchemeExpressionsIntID + with LoadExpressionIntID[SchemeExp] + with SaveActualComponents[SchemeExp] + with LoadActualComponents[SchemeExp] + with SimpleModF + + class SimpleModFIDs(program: SchemeExp, override val maxASTHeight: Int) + extends SimpleSchemeModFAnalysis(program) + with SaveRecursiveSchemeExpressionsIntID + with LoadExpressionIntID[SchemeExp] + with SaveComponentIntID[SchemeExp] + with LoadComponentIntID[SchemeExp] + with SimpleModF + + class SimpleModFActualCbor(program: SchemeExp) extends SimpleModFActual(program) with SaveCbor[SchemeExp] with LoadCbor[SchemeExp] + class SimpleModFPositionComponentsCbor(program: SchemeExp) + extends SimpleModFPositionComponents(program) + with SaveCbor[SchemeExp] + with LoadCbor[SchemeExp] + class SimpleModFIDComponentsCbor(program: SchemeExp) extends SimpleModFIDComponents(program) with SaveCbor[SchemeExp] with LoadCbor[SchemeExp] + class SimpleModFIDExpressionsCbor(program: SchemeExp) extends SimpleModFIDExpressions(program) with SaveCbor[SchemeExp] with LoadCbor[SchemeExp] + class SimpleModFIDSchemeExpressionsCbor(program: SchemeExp, maxASTHeight: Int) + extends SimpleModFIDSchemeExpressions(program, maxASTHeight) + with SaveCbor[SchemeExp] + with LoadCbor[SchemeExp] + class SimpleModFIDsCbor(program: SchemeExp, maxASTHeight: Int) + extends SimpleModFIDs(program, maxASTHeight) + with SaveCbor[SchemeExp] + with LoadCbor[SchemeExp] diff --git a/code/shared/src/test/scala/maf/test/persistence/Component.scala b/code/shared/src/test/scala/maf/test/persistence/Component.scala new file mode 100644 index 000000000..0f9f11ab2 --- /dev/null +++ b/code/shared/src/test/scala/maf/test/persistence/Component.scala @@ -0,0 +1,198 @@ +package maf.test.persistence + +import org.scalatest.flatspec.AnyFlatSpec +import maf.core.Position.Position 
+import io.bullet.borer.Json
+import maf.save.SavePosition
+import maf.language.scheme.SchemeExp
+import maf.modular.ModAnalysis
+import maf.save.AbstractEncoder
+import maf.save.MapEncoder
+import maf.core.Expression
+import org.scalatest.BeforeAndAfterEach
+import maf.core.Identifier
+import maf.core.NoCodeIdentityDebug
+import maf.save.ArrayEncoder
+import maf.save.ArrayKeyEncoder
+import maf.save.LoadPosition
+import maf.save.AbstractDecoder
+import maf.save.MapDecoder
+import org.scalatest.Assertions._
+import org.scalatest.propspec.AnyPropSpec
+import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks
+import org.scalacheck.Gen
+import maf.core.Position.PTag
+import maf.core.Position.NoPTag
+import org.scalatest.GivenWhenThen
+import maf.core.Position.SimplePTag
+import maf.core.Position.PTagWithSource
+import maf.core.Position.SourcePathTag
+import org.scalatest.matchers.should.Matchers
+import org.scalatest.prop.TableDrivenPropertyChecks
+import io.bullet.borer.Encoder
+import maf.save.ArrayDecoder
+import io.bullet.borer.Decoder
+import maf.core.BasicEnvironment
+import maf.core.NestedEnv
+import maf.save.LoadContext
+import maf.save.SaveContext
+import maf.save.SaveNoContext
+import maf.save.LoadNoContext
+import maf.modular.scheme.modf.NoContext
+import maf.core.NoCodeIdentity
+import maf.modular.scheme.modf.SchemeModFComponent
+import maf.core.Address
+import maf.save.SaveEnvironment
+import maf.save.LoadEnvironment
+import maf.core.Environment
+import maf.save.SaveStandardSchemeComponents
+import maf.save.LoadStandardSchemeComponents
+import maf.save.SaveActualComponents
+import maf.save.LoadActualComponents
+import maf.core.worklist.FIFOWorkList
+import scala.collection.immutable.Queue
+import maf.save.SaveWorklist
+import maf.save.LoadFIFOWorklist
+import maf.core.worklist.WorkList
+import maf.util.benchmarks.Timeout.T
+import maf.save.SaveSequentialWorklist
+
+trait ComponentGenerator extends Generator:
+    val pTags = Gen.alphaLowerStr.map((str) =>
+        Gen.oneOf[PTag](NoPTag, new SimplePTag(str), new PTagWithSource(str, Gen.alphaLowerStr.sample.get), new SourcePathTag(str)).sample.get
+    )
+
+    val positions = (for
+        x <- Gen.posNum[Int]
+        y <- Gen.posNum[Int]
+        ptag <- pTags
+    yield Position(x, y, ptag))
+
+    val identities = Gen.oneOf(NoCodeIdentity, NoCodeIdentityDebug)
+    val identifiers = Gen.alphaLowerStr.map((str) => new Identifier(str, identities.sample.get))
+
+    var schemeModFComponents = Gen.oneOf[SchemeModFComponent](
+        SchemeModFComponent.Main,
+        str.map(str =>
+            new SchemeModFComponent.Call[StringValue]((PersistenceSpec.simpleSchemeLambdaExpression(str), stringEnvironments.sample.get),
+                stringContexts.sample.get
+            )
+        )
+    )
+
+trait EnvironmentGenerator extends Generator:
+    val environments = Gen.oneOf(
+        str.map(str => new BasicEnvironment[Address](Map((str, stringAddr.sample.get)))),
+        str.map(str => new NestedEnv[Address, Address](Map((str, stringAddr.sample.get)), Gen.option(stringAddr.sample.get).sample.get))
+    )
+
+class PersistEnvironmentSpec extends PersistenceSpec with EnvironmentGenerator:
+    class EnvironmentAnalysis
+        extends TestAnalysis
+        with SaveEnvironment[Expression]
+        with LoadEnvironment[Expression]
+        with SaveStringAddr[Expression]
+        with LoadStringAddr[Expression]
+
+    testEncodingDecoding(
+        "environments",
+        environments,
+        () =>
+            val analysis = new EnvironmentAnalysis
+            import analysis.given
+            (summon[Encoder[Environment[Address]]], summon[Decoder[Environment[Address]]]),
+        (original: Environment[Address], decoded: Environment[Address]) =>
+            decoded should be(original)
+    )
+
+trait WorklistGenerator extends Generator:
+    val FIFOWorklists = Gen.listOfN(10, stringComponents).map(comps => new FIFOWorkList[StringValue](Queue(), Set()).addAll(comps))
+
+class PersistWorklistSpec extends PersistenceSpec with WorklistGenerator:
+    class FIFOWorklistAnalysis
+        extends TestAnalysis
+        with SaveSequentialWorklist[Expression]
+        with LoadFIFOWorklist[Expression]
+        with SaveStringComponent[Expression]
+        with LoadStringComponent[Expression]:
+        override def addToWorkList(cmp: Component) = workList = workList.add(cmp)
+        override def finished: Boolean = return true
+        override def run(timeout: T): Unit = return
+        override def initialComponent: Component = StringValue("INITIAL")
+
+    testEncodingDecoding(
+        "FIFO worklist",
+        FIFOWorklists,
+        () =>
+            val analysis = new FIFOWorklistAnalysis
+            import analysis.given
+            (analysis.worklistEncoder, analysis.worklistDecoder),
+        (original: WorkList[_], decoded: WorkList[_]) =>
+            decoded.toList.size should equal(original.toList.size)
+            var orig = original
+            var dec = decoded
+            while !orig.isEmpty do
+                dec.isEmpty should be(false)
+                dec.head should equal(orig.head)
+                orig = orig.tail
+                dec = dec.tail
+            dec.isEmpty should be(true)
+    )
+
+class PersistComponentSpec extends PersistenceSpec with ComponentGenerator:
+    class PositionAnalysis extends TestAnalysis with SavePosition[Expression] with LoadPosition[Expression]
+    testEncodingDecoding(
+        "position",
+        positions,
+        () =>
+            val analysis = new PositionAnalysis
+            import analysis.given
+            (summon[Encoder[Position]], summon[Decoder[Position]]),
+        (original: Position, decoded: Position) =>
+            decoded.line should be(original.line)
+            decoded.col should be(original.col)
+            decoded.tag should be(original.tag)
+    )
+
+    class SchemeModFComponentAnalysis
+        extends TestBaseSchemeModFSemanticsAnalysis
+        with SaveStandardSchemeComponents
+        with LoadStandardSchemeComponents
+        with SaveActualComponents[SchemeExp]
+        with LoadActualComponents[SchemeExp]
+        with SaveStringEnvironment[SchemeExp]
+        with LoadStringEnvironment[SchemeExp]
+        with SaveStringContext[SchemeExp]
+        with LoadStringContext[SchemeExp]
+        with SaveStringExpression[SchemeExp]
+        with LoadStringSchemeExpression:
+        override lazy val initialComponent: Component = SchemeModFComponent.Main
+        override def newComponent(call: SchemeModFComponent.Call[ComponentContext]): Component = ???
+        override def view(cmp: Component): SchemeModFComponent = ???
+        // Members declared in maf.modular.scheme.SchemeDomain
+        implicit override lazy val lattice: maf.language.scheme.lattices.SchemeLattice[Value, maf.core.Address] = ???
+        override lazy val primitives: maf.language.scheme.primitives.SchemePrimitives[Value, maf.core.Address] = ???
+
+    testEncodingDecoding(
+        "scheme modF components",
+        schemeModFComponents,
+        () =>
+            val analysis = new SchemeModFComponentAnalysis
+            import analysis.given
+            (analysis.componentEncoder, analysis.componentDecoder),
+        (original: SchemeModFComponent, decoded: SchemeModFComponent) => decoded should be(original)
+    )
+
+trait ContextGenerator:
+    val contexts = Gen.const(NoContext)
+
+class PersistContextSpec extends PersistenceSpec with ContextGenerator:
+    class NoContextAnalysis extends TestAnalysis with SaveNoContext[Expression] with LoadNoContext[Expression]
+    testEncodingDecoding(
+        "no context",
+        NoContext,
+        () =>
+            val analysis = new NoContextAnalysis
+            import analysis.given
+            (summon[Encoder[NoContext.type]], summon[Decoder[NoContext.type]]),
+        (original: NoContext.type, decoded: NoContext.type) => original should equal(decoded)
+    )
diff --git a/code/shared/src/test/scala/maf/test/persistence/Dependency.scala b/code/shared/src/test/scala/maf/test/persistence/Dependency.scala
new file mode 100644
index 000000000..71ce4645d
--- /dev/null
+++ b/code/shared/src/test/scala/maf/test/persistence/Dependency.scala
@@ -0,0 +1,108 @@
+package maf.test.persistence
+
+import maf.core.Address
+import maf.core.Expression
+import maf.language.scheme.SchemeExp
+import maf.modular.ModAnalysis
+import maf.modular.scheme.modf.SchemeModFComponent
+import maf.save.LoadAddr
+import maf.save.LoadNoContext
+import maf.save.LoadSchemeAddr
+import maf.save.LoadSchemeExpressions
+import maf.save.LoadStandardSchemeComponentPosition
+import maf.save.SaveAddr
+import maf.save.SaveNoContext
+import maf.save.SaveSchemeAddr
+import maf.save.SaveStandardSchemeComponentPosition
+import maf.save.save.SaveSchemeExpressions
+import org.scalacheck.Gen
+import maf.modular.scheme.VarAddr
+import maf.modular.scheme.modf.NoContext
+import maf.modular.ReturnAddr
+import maf.modular.scheme.PrmAddr
+import maf.modular.scheme.PtrAddr
+import io.bullet.borer.Encoder
+import io.bullet.borer.Decoder
+import maf.save.SaveContext
+import maf.save.LoadContext
+import maf.modular.AddrDependency
+import maf.save.SaveAddrDep
+import maf.save.LoadAddrDependency
+import maf.modular.Dependency
+
+trait AddressGenerator extends Generator with ComponentGenerator:
+    val addresses =
+        Gen.oneOf(functionGen(generateVarAddr), functionGen(generatePrmAddr), functionGen(generateReturnAddr), functionGen(generatePtrAddr))
+
+    def generateVarAddr(): VarAddr[Option[StringValue]] =
+        return new VarAddr(identifiers.sample.get, Gen.option(stringValues).sample.get)
+
+    def generateReturnAddr(): ReturnAddr[StringValue] =
+        return new ReturnAddr(StringValue(str.sample.get), identities.sample.get)
+
+    def generatePrmAddr(): PrmAddr =
+        return new PrmAddr(str.sample.get)
+
+    def generatePtrAddr(): PtrAddr[Option[StringValue]] =
+        return new PtrAddr(PersistenceSpec.simpleSchemeExpression(str.sample.get), Gen.option(stringValues).sample.get)
+
+trait DependencyGenerator extends Generator:
+    val dependencies = stringAddr.map(addr => new AddrDependency(addr))
+
+class PersistDependencySpec extends PersistenceSpec with DependencyGenerator:
+    class DependencyAnalysis
+        extends TestAnalysis
+        with SaveAddrDep[Expression]
+        with LoadAddrDependency[Expression]
+        with SaveStringAddr[Expression]
+        with LoadStringAddr[Expression]
+        with SaveStringComponent[Expression]
+        with LoadStringComponent[Expression]
+
+    testEncodingDecoding(
+        "dependency",
+        dependencies,
+        () =>
+            val anl = new DependencyAnalysis
+            import anl.given
+            (summon[Encoder[Dependency]], summon[Decoder[Dependency]]),
+        (original: Dependency, decoded: Dependency) => decoded should equal(original)
+    )
+
+class PersistAddressSpec extends PersistenceSpec with AddressGenerator:
+    trait AddressAnalysis[Expr <: Expression] extends ModAnalysis[Expr] with SaveAddr[Expr] with LoadAddr[Expr]
+    class SchemeAddressAnalysis
+        extends TestSchemeAnalysis
+        with AddressAnalysis[SchemeExp]
+        with SaveSchemeAddr
+        with LoadSchemeAddr
+        with SaveStringContext[SchemeExp]
+        with LoadStringContext[SchemeExp]
+        with SaveStringComponent[SchemeExp]
+        with LoadStringComponent[SchemeExp]
+        with SaveStringExpression[SchemeExp]
+        with LoadStringSchemeExpression
+
+    class NoContextAnalysis extends TestAnalysis with SaveNoContext[Expression] with LoadNoContext[Expression]
+
+    testEncodingDecoding(
+        "address",
+        addresses,
+        () =>
+            val anl = new SchemeAddressAnalysis
+            import anl.given
+            (summon[Encoder[Address]], summon[Decoder[Address]]),
+        (original: Address, decoded: Address) =>
+            decoded.idn should equal(original.idn)
+            decoded should equal(original)
+    )
+
+    testEncodingDecoding(
+        "no context",
+        NoContext,
+        () =>
+            val anl = new NoContextAnalysis
+            import anl.given
+            (summon[Encoder[NoContext.type]], summon[Decoder[NoContext.type]]),
+        (_: NoContext.type, decoded: NoContext.type) => decoded should equal(NoContext)
+    )
diff --git a/code/shared/src/test/scala/maf/test/persistence/Expression.scala b/code/shared/src/test/scala/maf/test/persistence/Expression.scala
new file mode 100644
index 000000000..180ba2d9a
--- /dev/null
+++ b/code/shared/src/test/scala/maf/test/persistence/Expression.scala
@@ -0,0 +1,119 @@
+package maf.test.persistence
+
+import maf.save.save.SaveSchemeExpressions
+import maf.save.LoadSchemeExpressions
+import maf.save.save.SaveActualExpressions
+import maf.save.LoadActualExpressions
+import maf.language.scheme.SchemeExp
+import org.scalacheck.Gen
+import maf.language.scheme.SchemeVar
+import maf.core.Identifier
+import maf.core.NoCodeIdentityDebug
+import io.bullet.borer.Encoder
+import io.bullet.borer.Decoder
+import maf.language.scheme.SchemeLambda
+import maf.language.scheme.SchemeFuncall
+import maf.language.scheme.SchemeVarArgLambda
+import maf.language.scheme.SchemeLetrec
+import maf.language.scheme.SchemeAssert
+import maf.language.scheme.SchemeLet
+import maf.language.scheme.SchemeIf
+import maf.language.scheme.SchemeSet
+import maf.language.scheme.SchemeBegin
+import maf.language.scheme.SchemeLetStar
+import maf.modular.ModAnalysis
+import maf.core.Expression
+import maf.language.scheme.SchemeValue
+import maf.language.sexp.Value
+
+trait ExpressionGenerator extends ComponentGenerator with Generator:
+    val schemeExpressions: Gen[SchemeExp] = functionGen(generateSchemeExp)
+    private val schemeExpressionList = Gen.listOfN(5, schemeExpressions)
+    private val identifiersList = Gen.listOfN(10, identifiers)
+    private val optionStrTuple = Gen.alphaLowerStr.map(str => Gen.option(Gen.const((str, Gen.alphaLowerStr.sample.get))).sample.get)
+    private val bindings = Gen
+        .listOfN(10, str)
+        .map(names =>
+            // This list is converted into a set to ensure that there are no duplicate identifiers
+            Set(names: _*).toList.map(name => (new Identifier(name, identities.sample.get), generateSchemeExp()))
+        )
+
+    def maxDepthFunctionSchemeGen(generate: () => SchemeExp): Gen[SchemeExp] = maxDepthFunctionGen[SchemeExp](generate, generateSchemeExp)
+    def generateSchemeExp(): SchemeExp =
+        val res = Gen
+            .frequency(
+                (1, maxDepthFunctionSchemeGen(generateSchemeFuncall)),
+                (10, identifiers.map(id => new SchemeVar(id))),
+                (1, maxDepthFunctionSchemeGen(generateSchemeLambda)),
+                (1, maxDepthFunctionSchemeGen(generateSchemeVarArgLambda)),
+                (10, functionGen(generateSchemeValue)),
+                (1, maxDepthFunctionSchemeGen(generateSchemeLetRec)),
+                (1, maxDepthFunctionSchemeGen(generateSchemeAssert)),
+                (1, maxDepthFunctionSchemeGen(generateSchemeLet)),
+                (1, maxDepthFunctionSchemeGen(generateSchemeIf)),
+                (1, maxDepthFunctionSchemeGen(generateSchemeSet)),
+                (1, maxDepthFunctionSchemeGen(generateSchemeBegin)),
+            )
+            .sample
+            .get
+        return res
+
+    def generateSchemeValue(): SchemeValue =
+        return new SchemeValue(Value.String(str.sample.get), identities.sample.get)
+
+    def generateSchemeLetStar(): SchemeLetStar =
+        return new SchemeLetStar(bindings.sample.get, schemeExpressionList.sample.get, identities.sample.get)
+
+    def generateSchemeBegin(): SchemeBegin =
+        return new SchemeBegin(schemeExpressionList.sample.get, identities.sample.get)
+
+    def generateSchemeSet(): SchemeSet =
+        return new SchemeSet(identifiers.sample.get, schemeExpressions.sample.get, identities.sample.get)
+
+    def generateSchemeIf(): SchemeIf =
+        return new SchemeIf(schemeExpressions.sample.get, schemeExpressions.sample.get, schemeExpressions.sample.get, identities.sample.get)
+
+    def generateSchemeLet(): SchemeLet =
+        return new SchemeLet(bindings.sample.get, schemeExpressionList.sample.get, identities.sample.get)
+
+    def generateSchemeAssert(): SchemeAssert =
+        return new SchemeAssert(schemeExpressions.sample.get, identities.sample.get)
+
+    def generateSchemeLetRec(): SchemeLetrec =
+        return new SchemeLetrec(bindings.sample.get, schemeExpressionList.sample.get, identities.sample.get)
+
+    def generateSchemeVarArgLambda(): SchemeVarArgLambda =
+        return new SchemeVarArgLambda(optionStr.sample.get,
+            identifiersList.sample.get,
+            identifiers.sample.get,
+            schemeExpressionList.sample.get,
+            optionStrTuple.sample.get,
+            identities.sample.get
+        )
+
+    def generateSchemeFuncall(): SchemeFuncall =
+        return new SchemeFuncall(generateSchemeExp(), schemeExpressionList.sample.get, identities.sample.get)
+
+    def generateSchemeLambda(): SchemeLambda =
+        val str = Gen.alphaLowerStr.sample.get
+        val id = identities.sample.get
+        val lambda = new SchemeLambda(optionStr.sample.get, identifiersList.sample.get, schemeExpressionList.sample.get, optionStrTuple.sample.get, id)
+        return lambda
+
+class PersistExpressionSpec extends PersistenceSpec with ExpressionGenerator:
+    trait ExpressionAnalysis[Expr <: Expression] extends ModAnalysis[Expr] with SaveActualExpressions[Expr] with LoadActualExpressions[Expr]
+    class SchemeExpressionAnalysis extends TestSchemeAnalysis with ExpressionAnalysis[SchemeExp] with SaveSchemeExpressions with LoadSchemeExpressions
+
+    testEncodingDecoding(
+        "scheme expressions",
+        schemeExpressions,
+        () =>
+            val anl = new SchemeExpressionAnalysis
+            import anl.given
+            (summon[Encoder[SchemeExp]], summon[Decoder[SchemeExp]]),
+        (original: SchemeExp, decoded: SchemeExp) =>
+            decoded.idn should equal(original.idn)
+            decoded.height should equal(original.height)
+            decoded should equal(original),
+        false
+    )
diff --git a/code/shared/src/test/scala/maf/test/persistence/Store.scala b/code/shared/src/test/scala/maf/test/persistence/Store.scala
new file mode 100644
index 000000000..ad2f7a25b
--- /dev/null
+++ b/code/shared/src/test/scala/maf/test/persistence/Store.scala
@@ -0,0 +1,153 @@
+package maf.test.persistence
+
+import maf.save.SaveValue
+import maf.modular.ModAnalysis
+import maf.save.LoadValue
+import maf.save.LoadModularDomain
+import maf.core.Expression
+import maf.language.scheme.SchemeExp
+import maf.save.SaveLattice
+import maf.save.LoadLattice
+import maf.lattice.HMapKey
+import maf.language.scheme.lattices.SchemeLattice
+import maf.modular.scheme.SchemeConstantPropagationDomain
+import maf.language.scheme.lattices.ModularSchemeLattice
+import maf.modular.scheme.ModularSchemeLatticeWrapper
+import maf.lattice.ConstantPropagation
+import org.scalacheck.Gen
+import io.bullet.borer.Encoder
+import io.bullet.borer.Decoder
+import maf.lattice.HMap
+import maf.lattice.interfaces.IntLattice
+import scala.collection.mutable.HashMap
+import maf.core.BasicEnvironment
+import maf.core.Address
+import maf.save.SaveModularSchemeDomain
+import maf.save.SaveSchemeConstantPropagationDomain
+import maf.save.LoadModularSchemeDomain
+
+trait ValueGenerator extends Generator:
+    type SchemeLattice = ModularSchemeLattice[?, ?, ?, ?, ?, ?, ?]
+    val modularLattice = SchemeConstantPropagationDomain.modularLattice
+
+    val schemeLattices: Gen[(HMapKey, SchemeLattice#Value)] = functionGen(generateSchemeLattice)
+    val schemeLatticeMaps = Gen.mapOfN(3, schemeLattices)
+
+    def generateSchemeLattice(): (HMapKey, SchemeLattice#Value) =
+        val lattice = Gen
+            .oneOf(
+                functionGen(generateSchemeIntLattice),
+                functionGen(generateSchemeBoolLattice),
+                functionGen(generateSchemeStringLattice),
+                functionGen(generateSchemeCharLattice),
+                functionGen(generateSchemePrimLattice),
+                functionGen(generateSchemeKontLattice),
+                functionGen(generateSchemeSymbolLattice),
+                maxDepthFunctionGen(generateSchemeConsLattice, generateSchemeLattice),
+                maxDepthFunctionGen(generateSchemeVectorLattice, generateSchemeLattice),
+                maxDepthFunctionGen(generateSchemeInputPortLattice, generateSchemeLattice),
+                functionGen(generateSchemeClosureLattice),
+                functionGen(generateSchemeNilLattice),
+                functionGen(generateSchemeVoidLattice),
+                functionGen(generateSchemePointerLattice)
+            )
+            .sample
+            .get
+        return lattice
+
+    val hMaps = schemeLatticeMaps.map((map) => new HMap(map))
+
+    def generateConstantSchemeLattice[T](gen: Gen[T]): Gen[ConstantPropagation.L[T]] =
+        return Gen.oneOf(ConstantPropagation.Top, ConstantPropagation.Bottom, ConstantPropagation.Constant(gen.sample.get))
+
+    def generateSchemeIntLattice(): (HMapKey, SchemeLattice#Int) =
+        return (modularLattice.IntT, new modularLattice.Int(generateConstantSchemeLattice(Gen.Choose.chooseBigInt.choose(0, 1000000)).sample.get))
+
+    def generateSchemeBoolLattice(): (HMapKey, SchemeLattice#Bool) =
+        return (modularLattice.BoolT, new modularLattice.Bool(generateConstantSchemeLattice(Gen.prob(0.5)).sample.get))
+
+    def generateSchemeStringLattice(): (HMapKey, SchemeLattice#Str) =
+        return (modularLattice.StrT, new modularLattice.Str(generateConstantSchemeLattice(str).sample.get))
+
+    def generateSchemeCharLattice(): (HMapKey, SchemeLattice#Char) =
+        return (modularLattice.CharT, new modularLattice.Char(generateConstantSchemeLattice(Gen.alphaChar).sample.get))
+
+    def generateSchemeInputPortLattice(): (HMapKey, SchemeLattice#InputPort) =
+        return (modularLattice.InputPortT, new modularLattice.InputPort(hMaps.sample.get))
+
+    def generateSchemeKontLattice(): (HMapKey, SchemeLattice#Kont) =
+        return (modularLattice.KontT,
+            new modularLattice.Kont(
+                Gen.listOfN(10, stringComponents).sample.get.asInstanceOf[List[SchemeLattice#Kont]].toSet
+            )
+        )
+
+    def generateSchemePrimLattice(): (HMapKey, SchemeLattice#Prim) =
+        return (modularLattice.PrimT, new modularLattice.Prim(Gen.listOfN(5, str).sample.get.toSet))
+
+    def generateSchemeClosureLattice(): (HMapKey, SchemeLattice#Clo) =
+        return (modularLattice.CloT,
+            new modularLattice.Clo(
+                Set(
+                    (PersistenceSpec.simpleSchemeLambdaExpression(str.sample.get),
+                        new BasicEnvironment[Address](Map((str.sample.get, stringAddr.sample.get)))
+                    )
+                )
+            )
+        )
+
+    def generateSchemePointerLattice(): (HMapKey, SchemeLattice#Pointer) =
+        return (modularLattice.PointerT, modularLattice.Pointer(Gen.listOfN(5, stringAddr).sample.get.toSet))
+
+    def generateSchemeSymbolLattice(): (HMapKey, SchemeLattice#Symbol) =
+        return (modularLattice.SymbolT, new modularLattice.Symbol(generateConstantSchemeLattice(str).sample.get))
+
+    def generateSchemeConsLattice(): (HMapKey, SchemeLattice#Cons) =
+        val car = hMaps.sample.get
+        val cdr = hMaps.sample.get
+        return (modularLattice.ConsT, new modularLattice.Cons(car, cdr))
+
+    def generateSchemeVectorLattice(): (HMapKey, SchemeLattice#Vec) =
+        val length = Gen.Choose.chooseBigInt.choose(1, 5).sample.get
+        val lengthLattice = ConstantPropagation.Constant(length)
+
+        val list = (for i <- Range(0, length.toInt) yield ((ConstantPropagation.Constant(BigInt(i)), hMaps.sample.get)))
+        return (modularLattice.VecT, modularLattice.Vec(lengthLattice, list.toMap))
+
+    def generateSchemeNilLattice(): (HMapKey, modularLattice.Nil.type) =
+        return (modularLattice.NilT, modularLattice.Nil)
+
+    def generateSchemeVoidLattice(): (HMapKey, modularLattice.Void.type) =
+        return (modularLattice.VoidT, modularLattice.Void)
+
+class PersistValueSpec extends PersistenceSpec with ValueGenerator:
+    trait ValueAnalysis[Expr <: Expression] extends ModAnalysis[Expr] with SaveValue[Expr] with LoadValue[Expr]
+    class ModularDomainAnalysis
+        extends TestBaseSchemeModFSemanticsAnalysis
+        with ValueAnalysis[SchemeExp]
+        with SaveModularSchemeDomain
+        with SaveSchemeConstantPropagationDomain
+        with LoadModularDomain
+        with LoadModularSchemeDomain
+        with SaveStringContext[SchemeExp]
+        with LoadStringContext[SchemeExp]
+        with SaveStringComponent[SchemeExp]
+        with LoadStringComponent[SchemeExp]
+        with SaveStringExpression[SchemeExp]
+        with LoadStringSchemeExpression
+        with SaveStringAddr[SchemeExp]
+        with LoadStringAddr[SchemeExp]:
+        override val modularLatticeWrapper: ModularSchemeLatticeWrapper = SchemeConstantPropagationDomain
+
+    testEncodingDecoding(
+        "lattice",
+        schemeLattices,
+        () =>
+            val anl = ModularDomainAnalysis()
+            import anl.given
+            (summon[Encoder[(HMapKey, Any)]], summon[Decoder[(HMapKey, Any)]]),
+        (original: (HMapKey, Any), decoded: (HMapKey, Any)) =>
+            decoded._1 should equal(original._1)
+            decoded._2 should equal(original._2),
+        false
+    )
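All of these specs exercise the same round-trip property: generate a value with ScalaCheck, encode it through the borer Encoder exposed by the analysis, decode it again, and compare the result with the original. A minimal, self-contained sketch of that pattern, assuming only borer, ScalaCheck and ScalaTest (ExamplePosition and RoundTripExampleSpec are illustrative names, not part of this patch, and the sketch bypasses the MAF-specific testEncodingDecoding helper):

import io.bullet.borer.{Cbor, Decoder, Encoder}
import io.bullet.borer.derivation.MapBasedCodecs.{deriveDecoder, deriveEncoder}
import org.scalacheck.Gen
import org.scalatest.matchers.should.Matchers
import org.scalatest.propspec.AnyPropSpec
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks

// Stand-in for the analysis data that the real specs persist.
final case class ExamplePosition(line: Int, col: Int)
object ExamplePosition:
    // borer-derivation produces map-based codecs for the case class.
    given Encoder[ExamplePosition] = deriveEncoder[ExamplePosition]
    given Decoder[ExamplePosition] = deriveDecoder[ExamplePosition]

class RoundTripExampleSpec extends AnyPropSpec with ScalaCheckPropertyChecks with Matchers:
    private val positions: Gen[ExamplePosition] =
        for
            line <- Gen.posNum[Int]
            col <- Gen.posNum[Int]
        yield ExamplePosition(line, col)

    property("positions survive a CBOR round trip") {
        forAll(positions) { original =>
            val bytes = Cbor.encode(original).toByteArray              // serialise
            val decoded = Cbor.decode(bytes).to[ExamplePosition].value // deserialise
            decoded should be(original)
        }
    }

Replacing Cbor with io.bullet.borer.Json gives the same round trip over a textual encoding.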
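The expression and lattice generators also have to keep recursive structures finite: the weights in Gen.frequency favour leaf nodes, and maxDepthFunctionGen (a test utility not shown in this patch, which presumably caps recursion depth) guards the recursive cases. A standard way to get the same effect with plain ScalaCheck, shown here on a toy expression type rather than SchemeExp, is to thread the generator size through Gen.sized and Gen.resize:

import org.scalacheck.Gen

// Toy expression tree standing in for SchemeExp (illustrative only).
sealed trait ToyExpr
final case class Num(n: Int) extends ToyExpr
final case class Add(l: ToyExpr, r: ToyExpr) extends ToyExpr

// Depth-bounded recursive generation: leaves stay likely, and the available
// size is halved on every recursive step, so generation always terminates.
def toyExprGen: Gen[ToyExpr] = Gen.sized { size =>
    if size <= 1 then Gen.posNum[Int].map(Num(_))
    else
        Gen.frequency(
          3 -> Gen.posNum[Int].map(Num(_)),
          1 -> (for
                    l <- Gen.resize(size / 2, toyExprGen)
                    r <- Gen.resize(size / 2, toyExprGen)
                yield Add(l, r))
        )
}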