From 5bd765acad4a0720442b9d89549ff5862ac6c714 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Wed, 1 May 2024 14:36:05 +1000 Subject: [PATCH 001/104] Injecting memory regions into IR --- build.sbt | 2 + .../InterprocSteensgaardAnalysis.scala | 22 +- .../scala/analysis/RegToMemAnalysis.scala | 2 - src/main/scala/analysis/RegionInjector.scala | 252 ++++++++++++++++++ src/main/scala/analysis/UtilMethods.scala | 23 +- src/main/scala/util/RunUtils.scala | 50 +++- 6 files changed, 311 insertions(+), 40 deletions(-) create mode 100644 src/main/scala/analysis/RegionInjector.scala diff --git a/build.sbt b/build.sbt index 34e8b69bf..fbc1dcaf7 100644 --- a/build.sbt +++ b/build.sbt @@ -10,6 +10,7 @@ val scalactic = "org.scalactic" %% "scalactic" % "3.2.10" val antlrRuntime = "org.antlr" % "antlr4-runtime" % "4.9.3" val sourceCode = "com.lihaoyi" %% "sourcecode" % "0.3.0" val mainArgs = "com.lihaoyi" %% "mainargs" % "0.5.1" +val parralelCollections = "org.scala-lang.modules" %% "scala-parallel-collections" % "1.0.4" lazy val root = project .in(file(".")) @@ -26,6 +27,7 @@ lazy val root = project libraryDependencies += scalaTests, libraryDependencies += sourceCode, libraryDependencies += mainArgs, + libraryDependencies += parralelCollections, libraryDependencies += "org.scalameta" %% "munit" % "0.7.29" % Test ) diff --git a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala index d38bcf960..cb76f491d 100644 --- a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala +++ b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala @@ -65,24 +65,6 @@ class InterprocSteensgaardAnalysis( s"malloc_$mallocCount" } - /** - * In expressions that have accesses within a region, we need to relocate - * the base address to the actual address using the relocation table. - * MUST RELOCATE because MMM iterate to find the lowest address - * TODO: May need to iterate over the relocation table to find the actual address - * - * @param address - * @return BitVecLiteral: the relocated address - */ - def relocatedBase(address: BitVecLiteral): BitVecLiteral = { - val tableAddress = globalOffsets.getOrElse(address.value, address.value) - // this condition checks if the address is not layered and returns if it is not - if (tableAddress != address.value && !globalOffsets.contains(tableAddress)) { - return address - } - BitVecLiteral(tableAddress, address.size) - } - /** * Used to reduce an expression that may be a sub-region of a memory region. 
* Pointer reduction example: @@ -158,9 +140,7 @@ class InterprocSteensgaardAnalysis( reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, b2), n) } case dataRegion: DataRegion => - println(s"Hey, I'm a data region: ${dataRegion}") - println(s"Hey, I'm a offset: ${b}") - val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start), b) + val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start, globalOffsets), b) evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => reducedRegions ++= exprToRegion(b2, n) } diff --git a/src/main/scala/analysis/RegToMemAnalysis.scala b/src/main/scala/analysis/RegToMemAnalysis.scala index 458444505..0f5e5a073 100644 --- a/src/main/scala/analysis/RegToMemAnalysis.scala +++ b/src/main/scala/analysis/RegToMemAnalysis.scala @@ -34,8 +34,6 @@ trait RegionAccessesAnalysis(cfg: ProgramCfg, constantProp: Map[CFGPosition, Map s + (RegisterVariableWrapper(localAssign.lhs, getDefinition(localAssign.lhs, cmd.data, reachingDefs)) -> FlatEl(memoryLoad)) case binaryExpr: BinaryExpr => if (evaluateExpression(binaryExpr.arg1, constants).isEmpty) { // approximates Base + Offset - println(s"Approximating ${localAssign} in $binaryExpr") - println(s"Reaching defs: ${reachingDefs(cmd.data)}") s + (RegisterVariableWrapper(localAssign.lhs, getDefinition(localAssign.lhs, cmd.data, reachingDefs)) -> FlatEl(binaryExpr)) } else { s diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala new file mode 100644 index 000000000..db7c95b1d --- /dev/null +++ b/src/main/scala/analysis/RegionInjector.scala @@ -0,0 +1,252 @@ +package analysis + +import ir.* +import util.Logger +import scala.collection.immutable +import scala.collection.mutable + +/** + * Replaces the region access with the calculated memory region. + */ +class RegionInjector(domain: mutable.Set[CFGPosition], + constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], + mmm: MemoryModelMap, + reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], + globalOffsets: Map[BigInt, BigInt]) { + private val stackPointer = Register("R31", BitVecType(64)) + + def nodeVisitor(): Unit = { + for (elem <- domain) {localTransfer(elem)} + } + + /** + * Used to reduce an expression that may be a sub-region of a memory region. + * Pointer reduction example: + * R2 = R31 + 20 + * Mem[R2 + 8] <- R1 + * + * Steps: + * 1) R2 = R31 + 20 <- ie. stack access (assume R31 = stackPointer) + * ↓ + * R2 = StackRegion("stack_1", 20) + * + * 2) Mem[R2 + 8] <- R1 <- ie. memStore + * ↓ + * (StackRegion("stack_1", 20) + 8) <- R1 + * ↓ + * MMM.get(20 + 8) <- R1 + * + * @param binExpr + * @param n + * @return Set[MemoryRegion]: a set of regions that the expression may be pointing to + */ + def reducibleToRegion(binExpr: BinaryExpr, n: Command): Set[MemoryRegion] = { + var reducedRegions = Set.empty[MemoryRegion] + binExpr.arg1 match { + case variable: Variable => + evaluateExpressionWithSSA(binExpr, constantProp(n), n, reachingDefs).foreach { b => + val region = mmm.findDataObject(b.value) + reducedRegions = reducedRegions ++ region + } + if (reducedRegions.nonEmpty) { + return reducedRegions + } + val ctx = getUse(variable, n, reachingDefs) + for (i <- ctx) { + if (i != n) { // handles loops (ie. 
R19 = R19 + 1) %00000662 in jumptable2 + val regions = i.rhs match { + case loadL: MemoryLoad => + val foundRegions = exprToRegion(loadL.index, i) + val toReturn = mutable.Set[MemoryRegion]().addAll(foundRegions) + for { + f <- foundRegions + } { + // TODO: Must enable this (probably need to calculate those contents beforehand) +// if (memoryRegionContents.contains(f)) { +// memoryRegionContents(f).foreach { +// case b: BitVecLiteral => +// // val region = mmm.findDataObject(b.value) +// // if (region.isDefined) { +// // toReturn.addOne(region.get) +// // } +// case r: MemoryRegion => +// toReturn.addOne(r) +// toReturn.remove(f) +// } +// } + } + toReturn.toSet + case _: BitVecLiteral => + Set.empty[MemoryRegion] + case _ => + println(s"Unknown expression: ${i}") + println(ctx) + exprToRegion(i.rhs, i) + } + val results = evaluateExpressionWithSSA(binExpr.arg2, constantProp(n), n, reachingDefs) + for { + b <- results + r <- regions + } { + r match { + case stackRegion: StackRegion => + val nextOffset = BinaryExpr(binExpr.op, stackRegion.start, b) + evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => + reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, b2), n) + } + case dataRegion: DataRegion => + val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start, globalOffsets), b) + evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => + reducedRegions ++= exprToRegion(b2, n) + } + case _ => + } + } + } + } + case _ => + } + reducedRegions + } + + /** + * Finds a region for a given expression using MMM results + * + * @param expr + * @param n + * @return Set[MemoryRegion]: a set of regions that the expression may be pointing to + */ + def exprToRegion(expr: Expr, n: Command): Set[MemoryRegion] = { + var res = Set[MemoryRegion]() + mmm.popContext() + mmm.pushContext(IRWalk.procedure(n).name) + expr match { // TODO: Stack detection here should be done in a better way or just merged with data + case binOp: BinaryExpr if binOp.arg1 == stackPointer => + evaluateExpressionWithSSA(binOp.arg2, constantProp(n), n, reachingDefs).foreach { b => + if binOp.arg2.variables.exists { v => v.sharedVariable } then { + Logger.debug("Shared stack object: " + b) + Logger.debug("Shared in: " + expr) + val regions = mmm.findSharedStackObject(b.value) + Logger.debug("found: " + regions) + res ++= regions + } else { + val region = mmm.findStackObject(b.value) + if (region.isDefined) { + res = res + region.get + } + } + } + res + case binaryExpr: BinaryExpr => + res ++= reducibleToRegion(binaryExpr, n) + res + case v: Variable if v == stackPointer => + res ++= mmm.findStackObject(0) + res + case v: Variable => + evaluateExpressionWithSSA(expr, constantProp(n), n, reachingDefs).foreach { b => + Logger.debug("BitVecLiteral: " + b) + val region = mmm.findDataObject(b.value) + if (region.isDefined) { + res += region.get + } + } + if (res.isEmpty) { // may be passed as param + val ctx = getUse(v, n, reachingDefs) + for (i <- ctx) { + i.rhs match { + case load: MemoryLoad => // treat as a region + res ++= exprToRegion(load.index, i) + case binaryExpr: BinaryExpr => + res ++= reducibleToRegion(binaryExpr, i) + res ++= exprToRegion(i.rhs, i) + case _ => // also treat as a region (for now) even if just Base + Offset without memLoad + res ++= exprToRegion(i.rhs, i) + } + } + } + res + case _ => + evaluateExpressionWithSSA(expr, constantProp(n), n, reachingDefs).foreach { b => + Logger.debug("BitVecLiteral: " + b) + val region = 
mmm.findDataObject(b.value) + if (region.isDefined) { + res += region.get + } + } + res + } + } + + /** Default implementation of eval. + */ + def eval(expr: Expr, cmd: Command): Expr = { + expr match + case literal: Literal => literal // ignore literals + case Extract(end, start, body) => + Extract(end, start, eval(body, cmd)) + case Repeat(repeats, body) => + Repeat(repeats, eval(body, cmd)) + case ZeroExtend(extension, body) => + ZeroExtend(extension, eval(body, cmd)) + case SignExtend(extension, body) => + SignExtend(extension, eval(body, cmd)) + case UnaryExpr(op, arg) => + UnaryExpr(op, eval(arg, cmd)) + case BinaryExpr(op, arg1, arg2) => + BinaryExpr(op, eval(arg1, cmd), eval(arg2, cmd)) + case MemoryStore(mem, index, value, endian, size) => + // TODO: index should be replaced region + val regions = exprToRegion(eval(index, cmd), cmd) + if (regions.size == 1) { + MemoryStore(Memory(regions.head.regionIdentifier, mem.addressSize, mem.valueSize), eval(index, cmd), eval(value, cmd), endian, size) + } else if (regions.size > 1) { + Logger.warn(s"MemStore is: ${cmd}") + Logger.warn(s"Multiple regions found for memory store: ${regions}") + expr + } else { + Logger.warn(s"No region found for memory store") + expr + } + case MemoryLoad(mem, index, endian, size) => + // TODO: index should be replaced region + val regions = exprToRegion(eval(index, cmd), cmd) + if (regions.size == 1) { + MemoryLoad(Memory(regions.head.regionIdentifier, mem.addressSize, mem.valueSize), eval(index, cmd), endian, size) + } else if (regions.size > 1) { + Logger.warn(s"MemLoad is: ${cmd}") + Logger.warn(s"Multiple regions found for memory load: ${regions}") + expr + } else { + Logger.warn(s"No region found for memory load") + expr + } + case Memory(name, addressSize, valueSize) => + expr // ignore memory + case variable: Variable => variable // ignore variables + } + + /** Transfer function for state lattice elements. 
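+    * Note: this rewrites each statement in place rather than producing a lattice element:
+    * the Memory of a load or store is swapped for the single region found for its index,
+    * and the expression is left unchanged (with a warning) when zero or several regions match.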
+ */ + def localTransfer(n: CFGPosition): Unit = n match { + case cmd: Command => + cmd match + case statement: Statement => statement match + case assign: LocalAssign => + assign.rhs = eval(assign.rhs, cmd) + case mAssign: MemoryAssign => + mAssign.lhs = eval(mAssign.lhs, cmd).asInstanceOf[Memory] + mAssign.rhs = eval(mAssign.rhs, cmd).asInstanceOf[MemoryStore] + case nop: NOP => // ignore NOP + case assert: Assert => + assert.body = eval(assert.body, cmd) + case assume: Assume => + assume.body = eval(assume.body, cmd) + case jump: Jump => jump match + case to: GoTo => // ignore GoTo + case call: Call => call match + case call: DirectCall => // ignore DirectCall + case call: IndirectCall => // ignore IndirectCall + case _ => // ignore other kinds of nodes + } +} \ No newline at end of file diff --git a/src/main/scala/analysis/UtilMethods.scala b/src/main/scala/analysis/UtilMethods.scala index 9265110c5..e6f138c12 100644 --- a/src/main/scala/analysis/UtilMethods.scala +++ b/src/main/scala/analysis/UtilMethods.scala @@ -108,10 +108,6 @@ def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapper val result = evaluateExpressionWithSSA(e.body, constantPropResult, n, reachingDefs) applySingle(BitVectorEval.boogie_extract(e.end, e.start, _: BitVecLiteral), result) case variable: Variable => - println("Variable: " + variable) - println("node: " + n) - println("reachingDefs: " + reachingDefs(n)) - println("getUse: " + getUse(variable, n, reachingDefs)) constantPropResult(RegisterWrapperEqualSets(variable, getUse(variable, n, reachingDefs))) case b: BitVecLiteral => Set(b) case _ => throw new RuntimeException("ERROR: CASE NOT HANDLED: " + exp + "\n") @@ -128,6 +124,25 @@ def getUse(variable: Variable, node: CFGPosition, reachingDefs: Map[CFGPosition, out.getOrElse(variable, Set()) } +/** + * In expressions that have accesses within a region, we need to relocate + * the base address to the actual address using the relocation table. 
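+ * Worked example (hypothetical offsets, for illustration only): with
+ * globalOffsets = Map(0x1000 -> 0x2000, 0x2000 -> 0x3000), relocatedBase(0x1000, ...) returns
+ * 0x2000 because the entry is layered and one link of the chain is followed, whereas with only
+ * Map(0x1000 -> 0x2000) the entry is not layered and 0x1000 is returned unchanged.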
+ * MUST RELOCATE because MMM iterate to find the lowest address + * TODO: May need to iterate over the relocation table to find the actual address + * + * @param address + * @param globalOffsets + * @return BitVecLiteral: the relocated address + */ +def relocatedBase(address: BitVecLiteral, globalOffsets: Map[BigInt, BigInt]): BitVecLiteral = { + val tableAddress = globalOffsets.getOrElse(address.value, address.value) + // this condition checks if the address is not layered and returns if it is not + if (tableAddress != address.value && !globalOffsets.contains(tableAddress)) { + return address + } + BitVecLiteral(tableAddress, address.size) +} + def unwrapExpr(expr: Expr): Set[Expr] = { var buffers: Set[Expr] = Set() expr match { diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index bbb7a2b61..1dbcee6eb 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -467,6 +467,7 @@ object StaticAnalysis { config: StaticAnalysisConfig, iteration: Int ): StaticAnalysisContext = { + val before = System.nanoTime() val IRProgram: Program = ctx.program val externalFunctions: Set[ExternalFunction] = ctx.externalFunctions val globals: Set[SpecGlobal] = ctx.globals @@ -487,7 +488,6 @@ object StaticAnalysis { Logger.info("Subroutine Addresses:") Logger.info(subroutines) - // reducible loops val detector = LoopDetector(IRProgram) val foundLoops = detector.identify_loops() @@ -497,6 +497,8 @@ object StaticAnalysis { val newLoops = transformer.llvm_transform() newLoops.foreach(l => Logger.info(s"Loop found: ${l.name}")) + println(s"Finished Loop Transform at ${(System.nanoTime() - before)/1000000} ms") + config.analysisDotPath.foreach { s => val newCFG = ProgramCfgFactory().fromIR(IRProgram) writeToFile(newCFG.toDot(x => x.toString, Output.dotIder), s"${s}_resolvedCFG-reducible.dot") @@ -507,23 +509,34 @@ object StaticAnalysis { val cfg = ProgramCfgFactory().fromIR(IRProgram) + println(s"Finished CFG gen at ${(System.nanoTime() - before) / 1000000} ms") + val domain = computeDomain(IntraProcIRCursor, IRProgram.procedures) Logger.info("[!] Running ANR") val ANRSolver = ANRAnalysisSolver(IRProgram) val ANRResult = ANRSolver.analyze() + println(s"Finished ANR at ${(System.nanoTime() - before) / 1000000} ms") + Logger.info("[!] Running RNA") val RNASolver = RNAAnalysisSolver(IRProgram) val RNAResult = RNASolver.analyze() + println(s"Finished RNA at ${(System.nanoTime() - before) / 1000000} ms") + Logger.info("[!] Running Constant Propagation") val constPropSolver = ConstantPropagationSolver(IRProgram) val constPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] = constPropSolver.analyze() + println(s"Finished ConstProp at ${(System.nanoTime() - before) / 1000000} ms") + + Logger.info("[!] Running IR Simple Value Analysis") val ilcpsolver = IRSimpleValueAnalysis.Solver(IRProgram) val newCPResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] = ilcpsolver.analyze() + println(s"Finished IR Simple Value Analysis at ${(System.nanoTime() - before) / 1000000} ms") + config.analysisResultsPath.foreach(s => writeToFile(printAnalysisResults(IRProgram, newCPResult), s"${s}_new_ir_constprop$iteration.txt") ) @@ -533,9 +546,12 @@ object StaticAnalysis { writeToFile(toDot(dumpdomain, InterProcIRCursor, Map.empty), s"${f}_new_ir_intercfg$iteration.dot") }) + Logger.info("[!] 
Running Reaching Definitions Analysis") val reachingDefinitionsAnalysisSolver = ReachingDefinitionsAnalysisSolver(IRProgram) val reachingDefinitionsAnalysisResults = reachingDefinitionsAnalysisSolver.analyze() + println(s"Finished reaching definitions at ${(System.nanoTime() - before) / 1000000} ms") + config.analysisDotPath.foreach(s => { writeToFile( toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> reachingDefinitionsAnalysisResults(b).toString).toMap), @@ -548,6 +564,8 @@ object StaticAnalysis { val regionAccessesAnalysisSolver = RegionAccessesAnalysisSolver(cfg, constPropResult, reachingDefinitionsAnalysisResults) val regionAccessesAnalysisResults = regionAccessesAnalysisSolver.analyze() + println(s"Finished region accesses at ${(System.nanoTime() - before) / 1000000} ms") + config.analysisDotPath.foreach(s => writeToFile(cfg.toDot(Output.labeler(regionAccessesAnalysisResults, true), Output.dotIder), s"${s}_RegTo$iteration.dot")) config.analysisResultsPath.foreach(s => writeToFile(printAnalysisResults(cfg, regionAccessesAnalysisResults, iteration), s"${s}_RegTo$iteration.txt")) @@ -555,10 +573,28 @@ object StaticAnalysis { val constPropSolverWithSSA = ConstantPropagationSolverWithSSA(IRProgram, reachingDefinitionsAnalysisResults) val constPropResultWithSSA = constPropSolverWithSSA.analyze() + println(s"Finished ConstProp with SSA at ${(System.nanoTime() - before) / 1000000} ms") + Logger.info("[!] Running MRA") val mraSolver = MemoryRegionAnalysisSolver(IRProgram, globalAddresses, globalOffsets, mergedSubroutines, constPropResult, ANRResult, RNAResult, regionAccessesAnalysisResults, reachingDefinitionsAnalysisResults) val mraResult = mraSolver.analyze() + Logger.info("[!] Running MMM") + val mmm = MemoryModelMap() + mmm.convertMemoryRegions(mraResult, mergedSubroutines, globalOffsets, mraSolver.procedureToSharedRegions) + mmm.logRegions() + + Logger.info("[!] Injecting regions") + val regionInjector = RegionInjector(domain, constPropResultWithSSA, mmm, reachingDefinitionsAnalysisResults, globalOffsets) + regionInjector.nodeVisitor() + + Logger.info("[!] Running Steensgaard") + val steensgaardSolver = InterprocSteensgaardAnalysis(IRProgram, constPropResultWithSSA, regionAccessesAnalysisResults, mmm, reachingDefinitionsAnalysisResults, globalOffsets) + steensgaardSolver.analyze() + val steensgaardResults = steensgaardSolver.pointsTo() + val memoryRegionContents = steensgaardSolver.getMemoryRegionContents + mmm.logRegions(memoryRegionContents) + config.analysisDotPath.foreach(s => { writeToFile(dotCallGraph(IRProgram), s"${s}_callgraph$iteration.dot") writeToFile( @@ -577,18 +613,6 @@ object StaticAnalysis { ) }) - Logger.info("[!] Running MMM") - val mmm = MemoryModelMap() - mmm.convertMemoryRegions(mraResult, mergedSubroutines, globalOffsets, mraSolver.procedureToSharedRegions) - mmm.logRegions() - - Logger.info("[!] Running Steensgaard") - val steensgaardSolver = InterprocSteensgaardAnalysis(IRProgram, constPropResultWithSSA, regionAccessesAnalysisResults, mmm, reachingDefinitionsAnalysisResults, globalOffsets) - steensgaardSolver.analyze() - val steensgaardResults = steensgaardSolver.pointsTo() - val memoryRegionContents = steensgaardSolver.getMemoryRegionContents - mmm.logRegions(memoryRegionContents) - Logger.info("[!] 
Running VSA") val vsaSolver = ValueSetAnalysisSolver(IRProgram, globalAddresses, externalAddresses, globalOffsets, subroutines, mmm, constPropResult) From e878a767c9a50f9f4a500f45146779790823e146 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Fri, 17 May 2024 16:09:55 +1000 Subject: [PATCH 002/104] Loop handling in MRA (using depth for now) --- .../InterprocSteensgaardAnalysis.scala | 11 ++- .../scala/analysis/MemoryRegionAnalysis.scala | 83 ++++++++++++------- src/main/scala/analysis/RegionInjector.scala | 13 ++- .../analysis/solvers/FixPointSolver.scala | 6 +- src/main/scala/util/RunUtils.scala | 2 +- 5 files changed, 70 insertions(+), 45 deletions(-) diff --git a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala index cb76f491d..4f5ccc458 100644 --- a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala +++ b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala @@ -90,7 +90,8 @@ class InterprocSteensgaardAnalysis( var reducedRegions = Set.empty[MemoryRegion] binExpr.arg1 match { case variable: Variable => - evaluateExpressionWithSSA(binExpr, constantProp(n), n, reachingDefs).foreach { b => + val a = evaluateExpressionWithSSA(binExpr, constantProp(n), n, reachingDefs) + a.foreach { b => val region = mmm.findDataObject(b.value) reducedRegions = reducedRegions ++ region } @@ -135,9 +136,11 @@ class InterprocSteensgaardAnalysis( } { r match { case stackRegion: StackRegion => - val nextOffset = BinaryExpr(binExpr.op, stackRegion.start, b) - evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => - reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, b2), n) + if (b.size == stackRegion.start.size) { + val nextOffset = BinaryExpr(binExpr.op, stackRegion.start, b) + evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => + reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, b2), n) + } } case dataRegion: DataRegion => val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start, globalOffsets), b) diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index 29348b7ba..2fa9b841e 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -15,7 +15,8 @@ trait MemoryRegionAnalysis(val program: Program, val ANRResult: Map[CFGPosition, Set[Variable]], val RNAResult: Map[CFGPosition, Set[Variable]], val regionAccesses: Map[CfgNode, Map[RegisterVariableWrapper, FlatElement[Expr]]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])]) { + val reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], + val maxDepth: Int) { var mallocCount: Int = 0 private var stackCount: Int = 0 @@ -90,9 +91,19 @@ trait MemoryRegionAnalysis(val program: Program, // TODO: this could be used instead of regionAccesses in other analyses to reduce the Expr to region conversion private val registerToRegions: mutable.Map[RegisterVariableWrapper, mutable.Set[MemoryRegion]] = mutable.Map() val procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]] = mutable.Map() + var depthMap: mutable.Map[CFGPosition, Int] = mutable.Map() def reducibleToRegion(binExpr: BinaryExpr, n: Command): Set[MemoryRegion] = { var reducedRegions = Set.empty[MemoryRegion] + if (depthMap.contains(n)) { + if (depthMap(n) > maxDepth) { + 
depthMap += (n -> 0) + return reducedRegions + } + } else { + depthMap += (n -> 0) + } + depthMap(n) += 1 binExpr.arg1 match { case variable: Variable => val ctx = getUse(variable, n, reachingDefs) @@ -103,6 +114,8 @@ trait MemoryRegionAnalysis(val program: Program, case _: BitVecLiteral => Set.empty case _ => + println(s"OG $n") + println(s"Unreducible: $i") eval(i.rhs, Set.empty, i) } evaluateExpression(binExpr.arg2, constantProp(n)) match { @@ -126,22 +139,24 @@ trait MemoryRegionAnalysis(val program: Program, } def eval(exp: Expr, env: Set[MemoryRegion], n: Command): Set[MemoryRegion] = { - Logger.debug(s"evaluating $exp") - Logger.debug(s"env: $env") - Logger.debug(s"n: $n") + println(s"Asked to evaluate: $exp at ${n.label}") exp match { case binOp: BinaryExpr => if (spList.contains(binOp.arg1)) { evaluateExpression(binOp.arg2, constantProp(n)) match { case Some(b: BitVecLiteral) => Set(poolMaster(b, IRWalk.procedure(n))) - case None => env + case None => throw RuntimeException(s"This should be reducible: $exp") } - } else if (reducibleToRegion(binOp, n).nonEmpty) { - reducibleToRegion(binOp, n) } else { - evaluateExpression(binOp, constantProp(n)) match { - case Some(b: BitVecLiteral) => eval(b, env, n) - case None => env + val reduced = reducibleToRegion(binOp, n) + if (reduced.nonEmpty) { + println(s"Reducible: exp $exp") + reduced + } else { + evaluateExpression(binOp, constantProp(n)) match { + case Some(b: BitVecLiteral) => eval(b, env, n) + case None => eval(binOp.arg1, env, n) ++ eval(binOp.arg2, env, n) + } } } case variable: Variable => @@ -149,7 +164,7 @@ trait MemoryRegionAnalysis(val program: Program, case _: LocalVar => env case reg: Register if spList.contains(reg) => - eval(BitVecLiteral(0, 64), env, n) + Set(poolMaster(BitVecLiteral(0, 64), IRWalk.procedure(n))) case _ => evaluateExpression(variable, constantProp(n)) match { case Some(b: BitVecLiteral) => @@ -162,11 +177,23 @@ trait MemoryRegionAnalysis(val program: Program, eval(memoryLoad.index, env, n) // ignore case where it could be a global region (loaded later in MMM from relf) case b: BitVecLiteral => - Set(poolMaster(b, IRWalk.procedure(n))) - // we cannot evaluate this to a concrete value, we need VSA for this - case _ => - Logger.debug(s"type: ${exp.getClass} $exp\n") - throw new Exception("Unknown type") + env + case literal: Literal => // ignore literals other than BitVectors + env + case extract: Extract => + eval(extract.body, env, n) + case repeat: Repeat => + eval(repeat.body, env, n) + case zeroExtend: ZeroExtend => + eval(zeroExtend.body, env, n) + case signExtend: SignExtend => + eval(signExtend.body, env, n) + case unaryExpr: UnaryExpr => + eval(unaryExpr.arg, env, n) + case memoryStore: MemoryStore => + eval(memoryStore.index, env, n) ++ eval(memoryStore.value, env, n) + case memory: Memory => + env } } @@ -174,6 +201,7 @@ trait MemoryRegionAnalysis(val program: Program, */ def localTransfer(n: CFGPosition, s: Set[MemoryRegion]): Set[MemoryRegion] = n match { case cmd: Command => + println(s"N: $n") cmd match { case directCall: DirectCall => val ANR = ANRResult(cmd) @@ -202,22 +230,12 @@ trait MemoryRegionAnalysis(val program: Program, s } case memAssign: MemoryAssign => - if (ignoreRegions.contains(memAssign.rhs.value)) { - s - } else { - val result = eval(memAssign.rhs.index, s, cmd) - regionLattice.lub(s, result) - } + val result = eval(memAssign.rhs.index, s, cmd) + regionLattice.lub(s, result) case localAssign: LocalAssign => stackDetection(localAssign) - var m = s - 
unwrapExpr(localAssign.rhs).foreach { - case memoryLoad: MemoryLoad => - val result = eval(memoryLoad.index, s, cmd) - m = regionLattice.lub(m, result) - case _ => m - } - m + val result = eval(localAssign.rhs, s, cmd) + regionLattice.lub(s, result) case _ => s } case _ => s // ignore other kinds of nodes @@ -235,8 +253,9 @@ class MemoryRegionAnalysisSolver( ANRResult: Map[CFGPosition, Set[Variable]], RNAResult: Map[CFGPosition, Set[Variable]], regionAccesses: Map[CfgNode, Map[RegisterVariableWrapper, FlatElement[Expr]]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])] - ) extends MemoryRegionAnalysis(program, globals, globalOffsets, subroutines, constantProp, ANRResult, RNAResult, regionAccesses, reachingDefs) + reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], + maxDepth: Int + ) extends MemoryRegionAnalysis(program, globals, globalOffsets, subroutines, constantProp, ANRResult, RNAResult, regionAccesses, reachingDefs, maxDepth) with IRIntraproceduralForwardDependencies with Analysis[Map[CFGPosition, LiftedElement[Set[MemoryRegion]]]] with WorklistFixpointSolverWithReachability[CFGPosition, Set[MemoryRegion], PowersetLattice[MemoryRegion]] { diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala index db7c95b1d..63358aeab 100644 --- a/src/main/scala/analysis/RegionInjector.scala +++ b/src/main/scala/analysis/RegionInjector.scala @@ -90,9 +90,13 @@ class RegionInjector(domain: mutable.Set[CFGPosition], } { r match { case stackRegion: StackRegion => - val nextOffset = BinaryExpr(binExpr.op, stackRegion.start, b) - evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => - reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, b2), n) + println(s"StackRegion: ${stackRegion.start}") + println(s"BitVecLiteral: ${b}") + if (b.size == stackRegion.start.size) { + val nextOffset = BinaryExpr(binExpr.op, stackRegion.start, b) + evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => + reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, b2), n) + } } case dataRegion: DataRegion => val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start, globalOffsets), b) @@ -159,7 +163,6 @@ class RegionInjector(domain: mutable.Set[CFGPosition], res ++= exprToRegion(load.index, i) case binaryExpr: BinaryExpr => res ++= reducibleToRegion(binaryExpr, i) - res ++= exprToRegion(i.rhs, i) case _ => // also treat as a region (for now) even if just Base + Offset without memLoad res ++= exprToRegion(i.rhs, i) } @@ -205,6 +208,7 @@ class RegionInjector(domain: mutable.Set[CFGPosition], Logger.warn(s"Multiple regions found for memory store: ${regions}") expr } else { + Logger.warn(s"MemStore is: ${cmd}") Logger.warn(s"No region found for memory store") expr } @@ -218,6 +222,7 @@ class RegionInjector(domain: mutable.Set[CFGPosition], Logger.warn(s"Multiple regions found for memory load: ${regions}") expr } else { + Logger.warn(s"MemLoad is: ${cmd}") Logger.warn(s"No region found for memory load") expr } diff --git a/src/main/scala/analysis/solvers/FixPointSolver.scala b/src/main/scala/analysis/solvers/FixPointSolver.scala index f9bfdbd9f..d7f40a6e3 100644 --- a/src/main/scala/analysis/solvers/FixPointSolver.scala +++ b/src/main/scala/analysis/solvers/FixPointSolver.scala @@ -248,10 +248,8 @@ trait PushDownWorklistFixpointSolver[N, T, L <: Lattice[T]] extends 
MapLatticeSo val xn = x(n) val y = transfer(n, xn) - // Only propagate if there's a change - if (y != xn) { - for succ <- outdep(n) do propagate(y, succ) - } + // TODO: Only propagate if there's a change + for succ <- outdep(n) do propagate(y, succ) /** Worklist-based fixpoint solver. * * @tparam N diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 1dbcee6eb..180c4f6e2 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -576,7 +576,7 @@ object StaticAnalysis { println(s"Finished ConstProp with SSA at ${(System.nanoTime() - before) / 1000000} ms") Logger.info("[!] Running MRA") - val mraSolver = MemoryRegionAnalysisSolver(IRProgram, globalAddresses, globalOffsets, mergedSubroutines, constPropResult, ANRResult, RNAResult, regionAccessesAnalysisResults, reachingDefinitionsAnalysisResults) + val mraSolver = MemoryRegionAnalysisSolver(IRProgram, globalAddresses, globalOffsets, mergedSubroutines, constPropResult, ANRResult, RNAResult, regionAccessesAnalysisResults, reachingDefinitionsAnalysisResults, maxDepth = 3) val mraResult = mraSolver.analyze() Logger.info("[!] Running MMM") From d23172d218be5574880ebb6a801af8edacb9a5d0 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Mon, 17 Jun 2024 11:14:18 +1000 Subject: [PATCH 003/104] Initial Works on VSA --- src/main/scala/analysis/ActualVSA.scala | 311 ++++++++++++++++++ src/main/scala/analysis/BitVectorEval.scala | 20 +- .../scala/analysis/IrreducibleLoops.scala | 2 +- .../analysis/LoopConditionEvaluator.scala | 13 + src/main/scala/analysis/MemoryModelMap.scala | 3 + src/main/scala/analysis/UtilMethods.scala | 3 - src/main/scala/util/RunUtils.scala | 29 +- 7 files changed, 370 insertions(+), 11 deletions(-) create mode 100644 src/main/scala/analysis/ActualVSA.scala create mode 100644 src/main/scala/analysis/LoopConditionEvaluator.scala diff --git a/src/main/scala/analysis/ActualVSA.scala b/src/main/scala/analysis/ActualVSA.scala new file mode 100644 index 000000000..b4ec94071 --- /dev/null +++ b/src/main/scala/analysis/ActualVSA.scala @@ -0,0 +1,311 @@ +package analysis +import ir._ +import util._ +import scala.collection.mutable +import analysis.BitVectorEval._ + +class ActualVSA(program: Program, + constantPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], + mmm: MemoryModelMap) { + + enum Flag { + case CF // Carry Flag + case ZF // Zero Flag + case SF // Sign Flag + case PF // Parity Flag + case AF // Auxiliary Flag + case OF // Overflow Flag + } + + enum Bool3 { + case True + case False + case Maybe + } + + case class StridedInterval(s: BitVecLiteral, lb: BitVecLiteral, ub: BitVecLiteral) { + require(smt_bvule(lb, ub) == TrueLiteral, "Lower bound must be less than or equal to upper bound") + + // Meaning of a strided interval + def gamma: Set[BitVecLiteral] = { + smt_interval(lb, ub, s) + } + + override def toString: String = { + s"$s[$lb, $ub]" + } + + // Addition + def +(that: StridedInterval): StridedInterval = { + val newLb = smt_bvadd(this.lb, that.lb) + val newUb = smt_bvadd(this.ub, that.ub) + val newS = gcd(this.s, that.s) + StridedInterval(newS, newLb, newUb) + } + + // Bitwise NOT + def unary_~ : StridedInterval = { + StridedInterval(s, smt_bvnot(ub), smt_bvnot(lb)) + } + + // Bitwise AND + def &(that: StridedInterval): StridedInterval = { + val lbAnd = smt_bvand(this.lb, that.lb) + val ubAnd = smt_bvand(this.ub, that.ub) + 
StridedInterval(gcd(this.s, that.s), lbAnd, ubAnd) + } + + // join of two or more strided intervals + def join(that: StridedInterval): StridedInterval = { + val newLb = smt_min(this.lb, that.lb) + val newUb = smt_max(this.ub, that.ub) + val newS = gcd(this.s, that.s) + StridedInterval(newS, newLb, newUb) + } + + // Helper function to compute the greatest common divisor + private def gcd(a: BitVecLiteral, b: BitVecLiteral): BitVecLiteral = { + if (b.value == 0) a else gcd(b, smt_bvsmod(a, b)) + } + } + + + /** + * ValueSet class that represents a set of values. + * s is the stride + * l is the lower bound + * u is the upper bound + * [l, u] is the interval + * [l, u] \ s is the set of values + * 0[l,l] represents the singleton set {l} + */ + case class ValueSet(intervals: Set[StridedInterval]) { + + def gamma: Set[BitVecLiteral] = { + intervals.flatMap(_.gamma) + } + + // Union of two value sets + def union(that: ValueSet): ValueSet = { + ValueSet(this.intervals ++ that.intervals) + } + + // Intersection of two value sets + def intersect(that: ValueSet): ValueSet = { + val newIntervals = for { + a <- this.intervals + b <- that.intervals + inter = intersectIntervals(a, b) if inter.isDefined + } yield inter.get + ValueSet(newIntervals) + } + + // Intersection of two strided intervals + private def intersectIntervals(a: StridedInterval, b: StridedInterval): Option[StridedInterval] = { + val newLb = smt_max(a.lb, b.lb) + val newUb = smt_min(a.ub, b.ub) + val newS = smt_gcd(a.s, b.s) + if (smt_bvule(newLb, newUb) == TrueLiteral) Some(StridedInterval(newS, newLb, newUb)) else None + } + + // Addition of value sets + def +(that: ValueSet): ValueSet = { + val newIntervals = for { + a <- this.intervals + b <- that.intervals + } yield a + b + ValueSet(newIntervals) + } + + // Addition of a constant to a value set + def +(c: BitVecLiteral): ValueSet = { + val newIntervals = for { + a <- this.intervals + } yield StridedInterval(a.s, smt_bvadd(a.lb, c), smt_bvadd(a.ub, c)) // TODO: Should Stride change? + ValueSet(newIntervals) + } + } + + // top element of the lattice + private object ValueSetLattice { + val TOP: ValueSet = ValueSet(Set(StridedInterval(BitVecLiteral(BigInt(1), 64), BitVecLiteral(BigInt(0), 64), BitVecLiteral(BigInt(Long.MaxValue), 64)))) + val BOTTOM: ValueSet = ValueSet(Set()) + } + + + case class AlocEnv(R: MemoryRegion) + //private type AbsEnv = mutable.Map[Variable | MemoryRegion, ValueSet] | mutable.Map[MemoryRegion, AlocEnv] | mutable.Map[Flag, Bool3] + //private type AbsEnv = mutable.Map[Variable | MemoryRegion | Flag, ValueSet | AlocEnv | Bool3] + case class AbsEnv( + env1: mutable.Map[Variable | MemoryRegion, ValueSet], + env2: mutable.Map[MemoryRegion, AlocEnv], + env3: mutable.Map[Flag, Bool3] + ): + def join(that: AbsEnv): AbsEnv = { + AbsEnv( + env1 ++ that.env1, + env2 ++ that.env2, + env3 ++ that.env3 + ) + } + + /** + * ∗(vs, s): Returns a pair of sets (F, P). F represents the set of “fully accessed” a-locs: it + * consists of the a-locs that are of size s and whose starting addresses are in vs. P represents + * the set of “partially accessed” a-locs: it consists of (i) a-locs whose starting addresses are in + * vs but are not of size s, and (ii) a-locs whose addresses are in vs but whose starting addresses + * and sizes do not meet the conditions to be in F. 
[Reference VSA paper] + * + * @param vsR2 + * @param s + * @return + */ + private def dereference(vsR2: ValueSet, s: Int): (Set[MemoryRegion], Set[MemoryRegion]) = { + // TODO: size of dereference s is ignored (maybe it can be used to check overflows?) + // TODO: Global memory size can be retrieved from the symbol table and are of size s + // Map addresses to exact memory locations + val fullyAccessedLocations = vsR2.gamma.flatMap(address => mmm.findStackObject(address.value)) + + // Identify partially accessed locations (if any) + val partiallyAccessedLocations = vsR2.gamma.flatMap(address => mmm.findStackPartialAccessesOnly(address.value)) + + // Return the set of fully accessed locations and the set of partially accessed locations + (fullyAccessedLocations.diff(partiallyAccessedLocations).asInstanceOf[Set[MemoryRegion]], partiallyAccessedLocations.asInstanceOf[Set[MemoryRegion]]) + } + + private def RemoveLowerBounds(vs: ValueSet): ValueSet = { + val newIntervals = for { + a <- vs.intervals + } yield StridedInterval(a.s, BitVecLiteral(BigInt(0), a.ub.size), a.ub) + ValueSet(newIntervals) + } + + private def RemoveUpperBounds(vs: ValueSet): ValueSet = { + val newIntervals = for { + a <- vs.intervals + } yield StridedInterval(a.s, a.lb, BitVecLiteral(BigInt(Long.MaxValue), a.lb.size)) + ValueSet(newIntervals) + } + + private def joinValueSets(vs1: ValueSet, vs2: ValueSet): ValueSet = { + vs1.union(vs2) + } + + private def meetValueSets(vs1: ValueSet, vs2: ValueSet): ValueSet = { + vs1.intersect(vs2) + } + + def AbstractTransformer(in: AbsEnv, instruction: CFGPosition): AbsEnv = { + instruction match { + case p: Procedure => in + case b: Block => in + case c: Command => + c match + case statement: Statement => + statement match + case localAssign: LocalAssign => + localAssign.rhs match + case binOp: BinaryExpr => + if (binOp.arg1.isInstanceOf[Variable]) { + val R1 = localAssign.lhs + val R2 = binOp.arg1.asInstanceOf[Variable] + val c = evaluateExpression(binOp.arg2, constantPropResult(instruction)) + + // R1 = R2 + c + val out = in + val vs_R2: ValueSet = in.env1.getOrElseUpdate(R2, ValueSetLattice.BOTTOM) + out.env1(R1) = vs_R2 + c.get + out + } else { + in + } + case memoryLoad: MemoryLoad => + memoryLoad.index match + case binOp: BinaryExpr => + if (binOp.arg2.isInstanceOf[Variable]) { + val R1 = localAssign.lhs + val R2 = binOp.arg1.asInstanceOf[Variable] // TODO: Is R2 always a variable? 
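+                            // Note: the guard above only checks binOp.arg2, so the unconditional cast
+                            // of binOp.arg1 here will throw if the load's base is ever not a Variable
+                            // (the case the TODO is asking about).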
+ val out = in + getDefinition(binOp.arg2.asInstanceOf[Variable], instruction, reachingDefs).foreach { + case d: LocalAssign => + d.rhs match + case binOp2: BinaryExpr => + val c1 = evaluateExpression(binOp2.arg1, constantPropResult(instruction)) + val c2 = evaluateExpression(binOp2.arg2, constantPropResult(instruction)) + // R1 = *(R2 + c1) + c2 + val vs_R2: ValueSet = in.env1(R2) + val s = c2.get.size // TODO: s is the size of dereference performed by the instruction (I assume it is the same size as c2) + val (f: Set[MemoryRegion], p: Set[MemoryRegion]) = dereference(vs_R2 + c1.get, s) + if (p.isEmpty) { + val vs_rhs = f.map(in.env1(_)).reduce(joinValueSets) + out.env1(R1) = vs_rhs + c2.get + } else { + out.env1(R1) = ValueSetLattice.TOP + } + case _ => out + } + out + } else { + in + } + case _ => in // TODO: Handle other cases + case variable: Variable => + val R1 = localAssign.lhs + val R2 = variable + // R1 >= R2 + val out = in + val vs_R1 = in.env1.getOrElseUpdate(R1, ValueSetLattice.BOTTOM) + val vs_R2 = in.env1(R2) + val vs_lb = RemoveUpperBounds(vs_R2) + val vs_ub = RemoveLowerBounds(vs_R1) + out.env1(R1) = vs_R1.intersect(vs_lb) + out.env1(R2) = vs_R2.intersect(vs_ub) + out + case bitVecLiteral: BitVecLiteral => + val R1 = localAssign.lhs + val c = bitVecLiteral + // R1 <= c + val vs_c = ValueSet(Set(StridedInterval(smt_gcd(BitVecLiteral(BigInt(0), c.size), c), BitVecLiteral(BigInt(0), c.size), c))) // TODO: Fix ME + val out = in + out.env1(R1) = meetValueSets(in.env1(R1), vs_c) + out + case _ => in // TODO: Handle other cases + case memoryAssign: MemoryAssign => in // TODO: *(R1 + c1) = R2 + c2 + case nop: NOP => in + case assert: Assert => in + case assume: Assume => in + case jump: Jump => in + } + } + + def IntraProceduralVSA(): mutable.Map[CFGPosition, AbsEnv] = { + val worklist = new mutable.Queue[CFGPosition]() + worklist.enqueue(program.mainProcedure) + val absEnv_enter = AbsEnv(mutable.Map().withDefault(_ => ValueSetLattice.BOTTOM), mutable.Map(), mutable.Map()) + val abstractStates = mutable.Map[CFGPosition, AbsEnv](worklist.head -> absEnv_enter) + while(worklist.nonEmpty) { + val n: CFGPosition = worklist.dequeue() + val m = IntraProcIRCursor.succ(n) + for (succ <- m) { + val edge_amc = AbstractTransformer(abstractStates(n), succ) + Propagate(succ, edge_amc) + } + } + + def Propagate(n: CFGPosition, edge_amc: AbsEnv): Unit = { + if (!abstractStates.contains(n)) { + abstractStates(n) = edge_amc + worklist.enqueue(n) + } else { + val oldEnv = abstractStates(n) + val newEnv = oldEnv.join(edge_amc) + if (newEnv != oldEnv) { + abstractStates(n) = newEnv + worklist.enqueue(n) + } + } + } + abstractStates + } +} diff --git a/src/main/scala/analysis/BitVectorEval.scala b/src/main/scala/analysis/BitVectorEval.scala index 0b5847506..a3da4de13 100644 --- a/src/main/scala/analysis/BitVectorEval.scala +++ b/src/main/scala/analysis/BitVectorEval.scala @@ -1,7 +1,8 @@ package analysis -import ir._ +import ir.* import analysis.BitVectorEval.* +import scala.annotation.tailrec import scala.math.pow object BitVectorEval { @@ -328,4 +329,21 @@ object BitVectorEval { } } + def smt_min(s: BitVecLiteral, t: BitVecLiteral): BitVecLiteral = { + if (smt_bvslt(s, t) == TrueLiteral) s else t + } + + def smt_max(s: BitVecLiteral, t: BitVecLiteral): BitVecLiteral = { + if (smt_bvslt(s, t) == TrueLiteral) t else s + } + + @tailrec + def smt_gcd(a: BitVecLiteral, b: BitVecLiteral): BitVecLiteral = { + if (b.value == 0) a else smt_gcd(b, smt_bvsmod(a, b)) + } + + def smt_interval(lb: BitVecLiteral, 
ub: BitVecLiteral, step: BitVecLiteral): Set[BitVecLiteral] = { + require(smt_bvule(lb, ub) == TrueLiteral, "Lower bound must be less than or equal to upper bound") + (lb.value to ub.value by step.value).map(BitVecLiteral(_, lb.size)).toSet + } } diff --git a/src/main/scala/analysis/IrreducibleLoops.scala b/src/main/scala/analysis/IrreducibleLoops.scala index f3d3bb44e..7486cbf44 100644 --- a/src/main/scala/analysis/IrreducibleLoops.scala +++ b/src/main/scala/analysis/IrreducibleLoops.scala @@ -24,7 +24,7 @@ private def label(p: CFGPosition) = { * */ case class LoopEdge(from: CFGPosition, to: CFGPosition) { - override def toString: String = s"(${label(from)}, ${label(to)})" + override def toString: String = s"(${from}, ${to})" } /* A loop is a subgraph of a CFG diff --git a/src/main/scala/analysis/LoopConditionEvaluator.scala b/src/main/scala/analysis/LoopConditionEvaluator.scala new file mode 100644 index 000000000..2659c6ecd --- /dev/null +++ b/src/main/scala/analysis/LoopConditionEvaluator.scala @@ -0,0 +1,13 @@ +//package analysis +//import ir.* +//import util.* +// +//class LoopConditionEvaluator(context: Map[CFGPosition, Map[Variable, Set[BitVecLiteral]]], reachingDefs: Map[CFGPosition, Map[Variable, Set[LocalAssign]]]) { +// def evaluate(loop: Loop): Set[BitVecLiteral] = { +// val loopCondition = loop.condition +// val loopHeader = loop.header +// val loopHeaderContext = context(loopHeader) +// val loopConditionResult = evaluateExpressionWithSSA(loopCondition, loopHeaderContext, loopHeader, reachingDefs) +// loopConditionResult +// } +//} diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index d91340d5b..1636fd085 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -201,6 +201,9 @@ class MemoryModelMap { } } + def findStackPartialAccessesOnly(value: BigInt): Option[StackRegion] = { + stackMap.find((range, _) => range.start < value && value <= range.end).map((range, obj) => obj) + } def findStackObject(value: BigInt): Option[StackRegion] = stackMap.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => obj) diff --git a/src/main/scala/analysis/UtilMethods.scala b/src/main/scala/analysis/UtilMethods.scala index e6f138c12..12b46463b 100644 --- a/src/main/scala/analysis/UtilMethods.scala +++ b/src/main/scala/analysis/UtilMethods.scala @@ -12,7 +12,6 @@ import util.Logger * The evaluated expression (e.g. 
0x69632) */ def evaluateExpression(exp: Expr, constantPropResult: Map[Variable, FlatElement[BitVecLiteral]]): Option[BitVecLiteral] = { - Logger.debug(s"evaluateExpression: $exp") exp match { case binOp: BinaryExpr => val lhs = evaluateExpression(binOp.arg1, constantPropResult) @@ -57,8 +56,6 @@ def evaluateExpression(exp: Expr, constantPropResult: Map[Variable, FlatElement[ } def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapperEqualSets, Set[BitVecLiteral]], n: CFGPosition, reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])]): Set[BitVecLiteral] = { - Logger.debug(s"evaluateExpression: $exp") - def apply(op: (BitVecLiteral, BitVecLiteral) => BitVecLiteral, a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = val res = for { x <- a diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 180c4f6e2..cbb9b1166 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -491,11 +491,11 @@ object StaticAnalysis { // reducible loops val detector = LoopDetector(IRProgram) val foundLoops = detector.identify_loops() - foundLoops.foreach(l => Logger.info(s"Loop found: ${l.name}")) + foundLoops.foreach(l => Logger.info(s"Loop found: ${l}")) val transformer = LoopTransform(foundLoops) val newLoops = transformer.llvm_transform() - newLoops.foreach(l => Logger.info(s"Loop found: ${l.name}")) + newLoops.foreach(l => Logger.info(s"Loop found: ${l}")) println(s"Finished Loop Transform at ${(System.nanoTime() - before)/1000000} ms") @@ -513,6 +513,11 @@ object StaticAnalysis { val domain = computeDomain(IntraProcIRCursor, IRProgram.procedures) + config.analysisDotPath.foreach { s => + writeToFile(cfg.toDot(x => x.toString, Output.dotIder), s"${s}_preCFG_${iteration}.dot") + writeToFile(printAnalysisResults(IRProgram, Map.empty), s"${s}_preCFG_$iteration.txt") + } + Logger.info("[!] Running ANR") val ANRSolver = ANRAnalysisSolver(IRProgram) val ANRResult = ANRSolver.analyze() @@ -613,10 +618,22 @@ object StaticAnalysis { ) }) - Logger.info("[!] Running VSA") - val vsaSolver = - ValueSetAnalysisSolver(IRProgram, globalAddresses, externalAddresses, globalOffsets, subroutines, mmm, constPropResult) - val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = vsaSolver.analyze() +// Logger.info("[!] Running VSA") +// val vsaSolver = +// ValueSetAnalysisSolver(IRProgram, globalAddresses, externalAddresses, globalOffsets, subroutines, mmm, constPropResult) +// val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = vsaSolver.analyze() + + val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = Map() + + val actualVSA = ActualVSA(IRProgram, constPropResult, reachingDefinitionsAnalysisResults, mmm) + val actualVSAResults: mutable.Map[CFGPosition, actualVSA.AbsEnv] = actualVSA.IntraProceduralVSA() + + config.analysisDotPath.foreach(s => { + writeToFile( + toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> actualVSAResults.withDefaultValue(actualVSA.AbsEnv(mutable.Map(), mutable.Map(), mutable.Map())).get(b).toString).toMap), + s"${s}_ActualVSA$iteration.dot" + ) + }) Logger.info("[!] 
Running Interprocedural Live Variables Analysis") //val interLiveVarsResults = InterLiveVarsAnalysis(IRProgram).analyze() From c0be95bf15ad68a3144cb5f18881e9551f1b087a Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Wed, 26 Jun 2024 12:29:42 +1000 Subject: [PATCH 004/104] Changes to partial region accesses --- src/main/scala/analysis/ActualVSA.scala | 83 +++++++++++++------- src/main/scala/analysis/Lattice.scala | 17 ++++ src/main/scala/analysis/MemoryModelMap.scala | 70 ++++++++++++++++- 3 files changed, 139 insertions(+), 31 deletions(-) diff --git a/src/main/scala/analysis/ActualVSA.scala b/src/main/scala/analysis/ActualVSA.scala index b4ec94071..9954f04a3 100644 --- a/src/main/scala/analysis/ActualVSA.scala +++ b/src/main/scala/analysis/ActualVSA.scala @@ -24,6 +24,15 @@ class ActualVSA(program: Program, case Maybe } + /** + * SI class that represents a strided interval + * s is the stride + * l is the lower bound + * u is the upper bound + * [l, u] is the interval + * [l, u] \ s is the set of values + * 0[l,l] represents the singleton set {l} + */ case class StridedInterval(s: BitVecLiteral, lb: BitVecLiteral, ub: BitVecLiteral) { require(smt_bvule(lb, ub) == TrueLiteral, "Lower bound must be less than or equal to upper bound") @@ -55,6 +64,14 @@ class ActualVSA(program: Program, val ubAnd = smt_bvand(this.ub, that.ub) StridedInterval(gcd(this.s, that.s), lbAnd, ubAnd) } + + // Intersection of two strided intervals + def intersect(that: StridedInterval): Option[StridedInterval] = { + val newLb = smt_max(this.lb, that.lb) + val newUb = smt_min(this.ub, that.ub) + val newS = smt_gcd(this.s, that.s) + if (smt_bvule(newLb, newUb) == TrueLiteral) Some(StridedInterval(newS, newLb, newUb)) else None + } // join of two or more strided intervals def join(that: StridedInterval): StridedInterval = { @@ -70,15 +87,9 @@ class ActualVSA(program: Program, } } - /** - * ValueSet class that represents a set of values. 
- * s is the stride - * l is the lower bound - * u is the upper bound - * [l, u] is the interval - * [l, u] \ s is the set of values - * 0[l,l] represents the singleton set {l} + * A single value set is a map from regions to strided intervals + * @param intervals */ case class ValueSet(intervals: Set[StridedInterval]) { @@ -96,19 +107,11 @@ class ActualVSA(program: Program, val newIntervals = for { a <- this.intervals b <- that.intervals - inter = intersectIntervals(a, b) if inter.isDefined + inter = a.intersect(b) if inter.isDefined } yield inter.get ValueSet(newIntervals) } - // Intersection of two strided intervals - private def intersectIntervals(a: StridedInterval, b: StridedInterval): Option[StridedInterval] = { - val newLb = smt_max(a.lb, b.lb) - val newUb = smt_min(a.ub, b.ub) - val newS = smt_gcd(a.s, b.s) - if (smt_bvule(newLb, newUb) == TrueLiteral) Some(StridedInterval(newS, newLb, newUb)) else None - } - // Addition of value sets def +(that: ValueSet): ValueSet = { val newIntervals = for { @@ -130,15 +133,19 @@ class ActualVSA(program: Program, // top element of the lattice private object ValueSetLattice { val TOP: ValueSet = ValueSet(Set(StridedInterval(BitVecLiteral(BigInt(1), 64), BitVecLiteral(BigInt(0), 64), BitVecLiteral(BigInt(Long.MaxValue), 64)))) - val BOTTOM: ValueSet = ValueSet(Set()) + val BOTTOM: ValueSet = ValueSet(mmm.getAllRegions.map(r => Set())) // TODO: should be all regions mapped to empty set } - case class AlocEnv(R: MemoryRegion) + case class AlocEnv(allocs: Set[MemoryRegion]) { + def join(that: AlocEnv): AlocEnv = { + AlocEnv(this.allocs ++ that.allocs) + } + } //private type AbsEnv = mutable.Map[Variable | MemoryRegion, ValueSet] | mutable.Map[MemoryRegion, AlocEnv] | mutable.Map[Flag, Bool3] //private type AbsEnv = mutable.Map[Variable | MemoryRegion | Flag, ValueSet | AlocEnv | Bool3] case class AbsEnv( - env1: mutable.Map[Variable | MemoryRegion, ValueSet], + env1: mutable.Map[Variable, ValueSet], env2: mutable.Map[MemoryRegion, AlocEnv], env3: mutable.Map[Flag, Bool3] ): @@ -161,14 +168,13 @@ class ActualVSA(program: Program, * @param s * @return */ - private def dereference(vsR2: ValueSet, s: Int): (Set[MemoryRegion], Set[MemoryRegion]) = { - // TODO: size of dereference s is ignored (maybe it can be used to check overflows?) + private def dereference(vsR2: ValueSet, s: BigInt): (Set[MemoryRegion], Set[MemoryRegion]) = { // TODO: Global memory size can be retrieved from the symbol table and are of size s // Map addresses to exact memory locations - val fullyAccessedLocations = vsR2.gamma.flatMap(address => mmm.findStackObject(address.value)) + val fullyAccessedLocations = vsR2.gamma.flatMap(address => mmm.findStackFullAccessesOnly(address.value, s)) // Identify partially accessed locations (if any) - val partiallyAccessedLocations = vsR2.gamma.flatMap(address => mmm.findStackPartialAccessesOnly(address.value)) + val partiallyAccessedLocations = vsR2.gamma.flatMap(address => mmm.findStackPartialAccessesOnly(address.value, s)) // Return the set of fully accessed locations and the set of partially accessed locations (fullyAccessedLocations.diff(partiallyAccessedLocations).asInstanceOf[Set[MemoryRegion]], partiallyAccessedLocations.asInstanceOf[Set[MemoryRegion]]) @@ -228,22 +234,24 @@ class ActualVSA(program: Program, val R2 = binOp.arg1.asInstanceOf[Variable] // TODO: Is R2 always a variable? 
val out = in getDefinition(binOp.arg2.asInstanceOf[Variable], instruction, reachingDefs).foreach { - case d: LocalAssign => + d => d.rhs match case binOp2: BinaryExpr => val c1 = evaluateExpression(binOp2.arg1, constantPropResult(instruction)) val c2 = evaluateExpression(binOp2.arg2, constantPropResult(instruction)) // R1 = *(R2 + c1) + c2 val vs_R2: ValueSet = in.env1(R2) - val s = c2.get.size // TODO: s is the size of dereference performed by the instruction (I assume it is the same size as c2) - val (f: Set[MemoryRegion], p: Set[MemoryRegion]) = dereference(vs_R2 + c1.get, s) + val s = memoryLoad.size // s is the size of dereference performed by the instruction + val (f: Set[MemoryRegion], p: Set[MemoryRegion]) = dereference(vs_R2 + c1.get, BigInt(s)) + println("VSA") + println(f) if (p.isEmpty) { val vs_rhs = f.map(in.env1(_)).reduce(joinValueSets) out.env1(R1) = vs_rhs + c2.get } else { out.env1(R1) = ValueSetLattice.TOP } - case _ => out + case _ => } out } else { @@ -282,12 +290,29 @@ class ActualVSA(program: Program, def IntraProceduralVSA(): mutable.Map[CFGPosition, AbsEnv] = { val worklist = new mutable.Queue[CFGPosition]() worklist.enqueue(program.mainProcedure) - val absEnv_enter = AbsEnv(mutable.Map().withDefault(_ => ValueSetLattice.BOTTOM), mutable.Map(), mutable.Map()) + val allStackRegions: Set[StackRegion] = mmm.getAllStackRegions() + val allDataRegions: Set[DataRegion] = mmm.getAllDataRegions() + val allHeapRegions: Set[HeapRegion] = mmm.getAllHeapRegions() + + val allocatedStackRegions = AlocEnv(allStackRegions) + val allocatedDataRegions = AlocEnv(allDataRegions) + val allocatedHeapRegions = AlocEnv(allHeapRegions) + + val stackManyToOne = allStackRegions.map(r => r -> allocatedStackRegions).toMap + val dataManyToOne = allDataRegions.map(r => r -> allocatedDataRegions).toMap + val heapManyToOne = allHeapRegions.map(r => r -> allocatedHeapRegions).toMap + + val combinedMap = stackManyToOne ++ dataManyToOne ++ heapManyToOne + val flagsToMaybe = Flag.values.map(f => f -> Bool3.Maybe).toMap + + val absEnv_enter = AbsEnv(mutable.Map().withDefault(_ => ValueSetLattice.BOTTOM), mutable.Map() ++ combinedMap, mutable.Map() ++ flagsToMaybe) val abstractStates = mutable.Map[CFGPosition, AbsEnv](worklist.head -> absEnv_enter) while(worklist.nonEmpty) { val n: CFGPosition = worklist.dequeue() val m = IntraProcIRCursor.succ(n) for (succ <- m) { + mmm.popContext() + mmm.pushContext(IRWalk.procedure(n).name) val edge_amc = AbstractTransformer(abstractStates(n), succ) Propagate(succ, edge_amc) } diff --git a/src/main/scala/analysis/Lattice.scala b/src/main/scala/analysis/Lattice.scala index 0ef98020f..5c3ccd630 100644 --- a/src/main/scala/analysis/Lattice.scala +++ b/src/main/scala/analysis/Lattice.scala @@ -140,6 +140,23 @@ class TupleLattice[L1 <: Lattice[T1], L2 <: Lattice[T2], T1, T2](val lattice1: L override def top: (T1, T2) = (lattice1.top, lattice2.top) } +//trait StridedIntervalLattice[T] extends Lattice[(T, T, T)] { +// override val bottom: (T, T, T) = (???, ???, ???) +// +// override def lub(x: (T1, T2), y: (T1, T2)): (T1, T2) = { +// val (x1, x2) = x +// val (y1, y2) = y +// (lattice1.lub(x1, y1), lattice2.lub(x2, y2)) +// } +// +// override def leq(x: (T1, T2), y: (T1, T2)): Boolean = { +// val (x1, x2) = x +// val (y1, y2) = y +// lattice1.leq(x1, y1) && lattice2.leq(x2, y2) +// } +// +// override def top: (T1, T2) = (lattice1.top, lattice2.top) +//} /** A lattice of maps from a set of elements of type `A` to a lattice with element `L'. Bottom is the default value. 
*/ diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index 1636fd085..fc719d945 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -201,8 +201,74 @@ class MemoryModelMap { } } - def findStackPartialAccessesOnly(value: BigInt): Option[StackRegion] = { - stackMap.find((range, _) => range.start < value && value <= range.end).map((range, obj) => obj) + /* All regions that either: + * 1. starts at value but size less than region size + * 2. starts at value but size more than region size (add both regions ie. next region) + * 3. starts between regions (start, end) and (value + size) => end + * 4. starts between regions (start, end) and (value + size) < end (add both regions ie. next region) + */ + def findStackPartialAccessesOnly(value: BigInt, size: BigInt): Set[StackRegion] = { + val matchingRegions = scala.collection.mutable.Set[StackRegion]() + + stackMap.foreach { case (range, region) => + // Condition 1: Starts at value but size less than region size + if (range.start == value && range.size > size) { + matchingRegions += region + } + // Condition 2: Starts at value but size more than region size (add subsequent regions) + else if (range.start == value && range.size < size) { + matchingRegions += region + var remainingSize = size - range.size + var nextStart = range.end + stackMap.toSeq.sortBy(_._1.start).dropWhile(_._1.start <= range.start).foreach { case (nextRange, nextRegion) => + if (remainingSize > 0) { + matchingRegions += nextRegion + remainingSize -= nextRange.size + nextStart = nextRange.end + } + } + } + // Condition 3: Starts between regions (start, end) and (value + size) => end + else if (range.start < value && (value + size) <= range.end) { + matchingRegions += region + } + // Condition 4: Starts between regions (start, end) and (value + size) < end (add subsequent regions) + else if (range.start < value && (value + size) > range.end) { + matchingRegions += region + var remainingSize = (value + size) - range.end + var nextStart = range.end + stackMap.toSeq.sortBy(_._1.start).dropWhile(_._1.start <= range.start).foreach { case (nextRange, nextRegion) => + if (remainingSize > 0) { + matchingRegions += nextRegion + remainingSize -= nextRange.size + nextStart = nextRange.end + } + } + } + } + + matchingRegions.toSet + } + + def getAllStackRegions: Set[StackRegion] = { + localStacks.values.toSet.flatten + } + + def getAllDataRegions: Set[DataRegion] = { + dataMap.values.toSet + } + + def getAllHeapRegions: Set[HeapRegion] = { + heapMap.values.toSet + } + + def getAllRegions: Set[MemoryRegion] = { + (getAllStackRegions ++ getAllDataRegions ++ getAllHeapRegions) + } + + /* All regions that start at value and are exactly of length size */ + def findStackFullAccessesOnly(value: BigInt, size: BigInt): Option[StackRegion] = { + stackMap.find((range, _) => range.start == value && range.size == size).map((range, obj) => obj) } def findStackObject(value: BigInt): Option[StackRegion] = From af21c7d03c53e8f3f5fc2260642ac0e9610250d5 Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Wed, 6 Mar 2024 14:14:40 +1000 Subject: [PATCH 005/104] graph utilities --- src/main/scala/analysis/DSAGraph.scala | 314 ++++++++++++++++ src/main/scala/analysis/DSAUtility.scala | 352 ++++++++++++++++++ src/main/scala/analysis/Local.scala | 276 ++++++++++++++ src/main/scala/analysis/LocalDSA.scala | 131 +++++++ .../scala/analysis/PointerTypeAnalysis.scala | 83 +++++ 
src/main/scala/analysis/PrePass.scala | 184 +++++++++ src/main/scala/analysis/ReachingDefs.scala | 30 ++ src/main/scala/analysis/RegionBuilder.scala | 336 +++++++++++++++++ .../analysis/SymbolicAccessAnalysis.scala | 168 +++++++++ src/main/scala/analysis/WriteToAnalysis.scala | 50 +++ .../scala/analysis/solvers/IDESolver.scala | 5 +- src/main/scala/ir/IRCursor.scala | 2 +- .../scala/translating/ReadELFLoader.scala | 2 +- src/test/scala/RegionBuilderTests.scala | 28 ++ 14 files changed, 1958 insertions(+), 3 deletions(-) create mode 100644 src/main/scala/analysis/DSAGraph.scala create mode 100644 src/main/scala/analysis/DSAUtility.scala create mode 100644 src/main/scala/analysis/Local.scala create mode 100644 src/main/scala/analysis/LocalDSA.scala create mode 100644 src/main/scala/analysis/PointerTypeAnalysis.scala create mode 100644 src/main/scala/analysis/PrePass.scala create mode 100644 src/main/scala/analysis/ReachingDefs.scala create mode 100644 src/main/scala/analysis/RegionBuilder.scala create mode 100644 src/main/scala/analysis/SymbolicAccessAnalysis.scala create mode 100644 src/main/scala/analysis/WriteToAnalysis.scala create mode 100644 src/test/scala/RegionBuilderTests.scala diff --git a/src/main/scala/analysis/DSAGraph.scala b/src/main/scala/analysis/DSAGraph.scala new file mode 100644 index 000000000..522e04881 --- /dev/null +++ b/src/main/scala/analysis/DSAGraph.scala @@ -0,0 +1,314 @@ +//package analysis +// +//import analysis.Node.getNextId +//import com.sun.org.apache.xalan.internal.xsltc.compiler.util.NodeType +//import ir.{Expr, Procedure, Register, Variable} +// +//import scala.collection.mutable +// +//// need a type procedure +// +////type Pointer = Cell | Variable +// +///** +// * DSA Graph +// */ +//class Graph(val procedure: Procedure) { +// +// val nodes: mutable.Set[Node] = mutable.Set() +// val pointersToCells: mutable.Map[Variable, Cell] = mutable.Map() +// // TODO refactor the one below +// // If cells change i don't think this will work. 
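// A minimal sketch (assumed names, not part of the graph below) of one way to
// address the concern noted just above about the Cell-keyed pointsToRelations
// map declared below: key the edges by a stable cell id and route every lookup
// through a union-find find(), so merged cells keep resolving to their current
// representative instead of leaving dangling keys when a Cell's node/offset
// identity changes.
object StablePointsToSketch {
  import scala.collection.mutable

  // Cells are identified by a stable Int id; `parent` is a tiny union-find.
  private val parent = mutable.Map[Int, Int]()
  private val pointsTo = mutable.Map[Int, Int]()

  def makeCell(id: Int): Int = { parent(id) = id; id }

  def find(c: Int): Int = {
    val p = parent(c)
    if (p == c) c
    else {
      val r = find(p)
      parent(c) = r // path compression
      r
    }
  }

  // Merge `absorbed` into `kept`, carrying the absorbed representative's edge.
  // (A full implementation would recursively merge pointees when both sides
  // already have one, as DSA-style cell merging does.)
  def merge(kept: Int, absorbed: Int): Unit = {
    val rk = find(kept)
    val ra = find(absorbed)
    if (rk != ra) {
      parent(ra) = rk
      pointsTo.remove(ra).foreach(p => pointsTo.getOrElseUpdate(rk, find(p)))
    }
  }

  def setPointee(cell: Int, pointee: Int): Unit = pointsTo(find(cell)) = find(pointee)
  def getPointee(cell: Int): Option[Int] = pointsTo.get(find(cell)).map(find)

  def main(args: Array[String]): Unit = {
    Seq(0, 1, 2).foreach(makeCell)
    setPointee(0, 2)       // cell 0 points to cell 2
    merge(1, 0)            // unify cell 0 into cell 1
    println(getPointee(0)) // Some(2): the edge survives the merge
    println(getPointee(1)) // Some(2): the representative sees the same edge
  }
}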
+// var pointsToRelations: mutable.Map[Cell, Cell] = mutable.Map() +// +// +// +// /** +// * +// * @param node +// * @return Set[(node, offset_i)_pointer, cell_pointee) +// */ +// def getPointees(node: Node): Set[(Cell, Cell)] = { +// pointsToRelations.foldLeft(Set(): Set[(Cell, Cell)]) { +// (s, m) => +// m match +// case (key, value) => +// if node.cells.keys.toSet.contains(key) then s.+((key, value)) else s +// } +// } +// +// def getPointers(node: Node): Set[(Cell, Cell)] = { +// pointsToRelations.foldLeft(Set(): Set[(Cell, Cell)]) { +// (s, m) => +// m match +// case (key, value) => +// if node.cells.values.toSet.contains(value) then s.+((key, value)) else s +// } +// } +// +// def pointTo(pointer: Cell, pointee: Option[Cell]): Unit = { +//// pointer.pointTo(pointee) +// pointee match +// case Some(value) => +// pointsToRelations.put(pointer, value) +// case None => pointsToRelations.remove(pointer) +// } +// +// +// def makeNode(memoryRegion2: Option[MemoryRegion2] = None): Node = { +// val node = Node(memoryRegion2, this) +// nodes.add(node) +// node +// } +// +// def makeCell(memoryRegion2: Option[MemoryRegion2] = None): Cell = { +// val node = makeNode(memoryRegion2) +// node.cell() +// } +// +// def unify(variable: Variable, cell: Cell): Unit = { +//// if !pointersToCells.contains(variable) then +//// pointersToCells.put(variable, cell) +//// else +//// pointersToCells(variable).unify(cell) +// getVariablePointee(variable).unify(cell) +// } +// +// def validateGraph(): Unit = { +// pointersToCells.values.toSet.union(pointsToRelations.keys.toSet.union(pointsToRelations.values.toSet)).foreach(validateCell) +// } +// +// def validateCell(cell: Cell): Unit = { +// assert(cell.node.isDefined) +// val node = cell.node.get +// println(cell) +// assert(nodes.contains(node)) +// assert(node.cells.contains(cell.offset)) +// assert(node.cells(cell.offset).equals(cell)) +// } +// +// def collapsePointer(pointer: Variable): Unit = { +// val cell = makeCell() +// cell.node.get.collapseNode() +// unify(pointer, cell) +// } +// +// def getVariablePointee(v: Variable): Cell = { +// pointersToCells.getOrElseUpdate(v, makeCell()) +// } +// +// def getCellPointee(c: Cell): Cell = { +// pointsToRelations.getOrElseUpdate(c, makeCell()) +// } +//} +// +//object Node { +// private var idCounter : Int = 0; +// private def getNextId: Int = { +// idCounter += 1 +// idCounter +// } +//} +// +// +///** +// * DSA Node represents a memory object +// */ +//class Node (var memoryRegion2: Option[MemoryRegion2], val owner: Graph) { +// val id = getNextId +// var cells: mutable.Map[BigInt, Cell] = mutable.Map() +// +//// var cells: mutable.Set[Cell] = mutable.Set() +// private val flags: NodeFlags = NodeFlags() +// var size: BigInt = memoryRegion2 match +// case Some(value) => // TODO get sizes of data regions and stack regions +// value match +// case DataRegion2(regionIdentifier, start) => 8 +// case HeapRegion2(regionIdentifier, proc, size) => size.value +// case StackRegion2(regionIdentifier, proc, size) => size.value +// case _ => 8 +// case None => 8 +// +// +// override def toString: String = s"Node($id, $memoryRegion2, $size)" +// +// override def equals(obj: Any): Boolean = +// obj match +// case n: Node => id == n.id +// case _ => false +// +// +// def links: IterableOnce[BigInt] = +// cells.keys +// +// def offsetHelper(offset1: BigInt, offset2: BigInt): BigInt = { +// if isCollapsed then +// 0 +// else if isSeq then +// (offset1 + offset2) % size +// else +// offset1 + offset2 +// } +// +// def 
redirectEdges(node: Node, offset: BigInt): Unit = { +// owner.getPointers(this).foreach( +// (pointer, pointee) => +// val newCell = node.cell(node.offsetHelper(offset, pointee.offset)) +// owner.pointTo(pointer, Some(newCell)) +// owner.pointersToCells.foreach( +// (key, value) => +// if value.equals(pointee) then owner.pointersToCells.put(key, newCell) +// ) +// ) +// +// owner.getPointees(this).foreach( +// (pointer, pointee) => +// val newCell = node.cell(node.offsetHelper(offset, pointer.offset)) +// if owner.pointsToRelations.contains(newCell) then +// pointee.unify(owner.pointsToRelations(newCell)) +// else +// owner.pointTo(newCell, Some(pointee)) +// ) +// +// owner.nodes.remove(this) +// owner.pointsToRelations = owner.pointsToRelations.filter( +// (key, value) => !(key.equals(this) && value.equals(this)) +// ) +// +// +// } +// def collapseNode(): Unit = { +// val cell = owner.makeCell(None) +// cells.foreach( +// (offset, c) => +// cell.unify(owner.getCellPointee(c)) +// owner.pointTo(c, None) +// ) +// size = 1 +// flags.collapsed = true +// owner.pointTo(this.cell(), Some(cell)) +// } +// +// def collapse(node: Node, offset: BigInt): Unit = { +// node.collapseNode() +// redirectEdges(node, offset) +// } +// +// def unify(node: Node, offset: BigInt = 0): Unit = { +//// owner.validateGraph() +// println(node) +// println(this) +// this.memoryRegion2 = node.memoryRegion2 +// val updatedOffset = offsetHelper(offset, 0) +// if (isCollapsed && !node.isCollapsed) { +// return collapse(node, updatedOffset) +// } else if (!isCollapsed && !node.isCollapsed) { +// if (isSeq && !node.isSeq) { +// if updatedOffset == 0 then node.unify(this) else return collapse(node, updatedOffset) +// } else if (!isSeq && node.isSeq) { +// if size % node.size == 0 then +// flags.seq = true +// return unify(node, offset) +// else if size + updatedOffset > node.size then +// return collapse(node, updatedOffset) +// } else if (isSeq && node.isSeq) { +// if size < node.size then return node.unify(this, 0) +// else if node.size % size != 0 || offsetHelper(offset, 0) > 0 then return collapse(node, updatedOffset) +// } +// } +// +// if this.equals(node) && updatedOffset > 0 then return node.collapseNode() +// redirectEdges(node, updatedOffset) +//// owner.validateGraph() +// } +// +// +// def cell(offset: BigInt = 0): Cell = { +// cells.getOrElseUpdate(offset, +// makeCell(offset) +// ) +// } +// +// +// private def makeCell(offset: BigInt = 0): Cell = { +// val cell = Cell(Some(this), offset) +// cells.update(offset, cell) +// cell +// } +// +// def updateSize(s: BigInt): Unit = { +// if isSeq && size != s then +// collapseNode() +// else if (!isSeq && s > size) then +// size = s +// } +// +// +// def isCollapsed = flags.collapsed +// def isSeq = flags.seq +// +// def setSeq(value: Boolean = true): Unit = { +// flags.seq = value +// } +// +//} +// +///** +// * Node flags +// */ +//class NodeFlags { +// var collapsed = false +// var seq = false +// def join(n: NodeFlags): Unit = { +// +// } +//} +// +///** +// * A memory cell (or a field). An offset into a memory object. 
+// */ +//class Cell(var node: Option[Node] = None, val offset: BigInt = 0) { +// +//// private var pointsTo: Option[Cell] = None +// private def n = node.get +// +// override def toString: String = s"Cell($node, $offset)" +// +//// def this(cell: Cell) = { +//// this(cell.node, cell.offset) +////// pointsTo = cell.pointsTo +//// } +//// +//// def this(cell: Cell, offset: BigInt) = { +//// this(cell.node, cell.offset + offset) +////// pointsTo = cell.pointsTo +//// } +//// +//// def this(node: Node, offset : BigInt) = { +//// this(Some(node), offset) +//// } +// +// +// override def equals(obj: Any): Boolean = { +// obj match +// case cell: Cell => cell.node.equals(this.node) && cell.offset == this.offset +// case _ => false +// } +// +// def unify(cell: Cell): Unit = { +// if (offset < cell.offset) then +// n.unify(cell.n, cell.offset - offset) +// else if (cell.offset < offset) then +// cell.n.unify(n, offset-cell.offset) +// else +// n.unify(cell.n) +// } +// +//} +// +///** +// * Simulation relation mapping +// */ +//class SimulationMap { +// +//} diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala new file mode 100644 index 000000000..7bce8e920 --- /dev/null +++ b/src/main/scala/analysis/DSAUtility.scala @@ -0,0 +1,352 @@ +package analysis + +import ir.{BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, IntraProcIRCursor, LocalAssign, MemoryAssign, MemoryStore, Procedure, Register, Variable, computeDomain, toShortString} +import specification.{ExternalFunction, SpecGlobal} + +import scala.collection.mutable; + +object NodeCounter { + var counter: Int = 0 + + def getCounter: Int = + counter = counter + 1 + if counter == 64 then + print("") + counter + + +} + + + +class DSG(proc: Procedure, + constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + globals: Set[SpecGlobal], globalOffsets: Map[BigInt, BigInt], + externalFunctions: Set[ExternalFunction], + reachingDefs: Map[CFGPosition, Map[Variable, Set[CFGPosition]]], + writesTo: Map[Procedure, Set[Register]]) { + // DSNodes owned by this graph + val nodes: mutable.Set[DSN] = mutable.Set() + val pointTo: mutable.Map[DSC, DSC] = mutable.Map() + + val mallocRegister = Register("R0", BitVecType(64)) + val stackPointer = Register("R31", BitVecType(64)) + + + // make all globals + private val swappedOffsets = globalOffsets.map(_.swap) + val globalMapping: mutable.Map[(BigInt, BigInt), DSN] = globals.foldLeft(mutable.Map[(BigInt, BigInt), DSN]()) { + (m, global) => + var address: BigInt = global.address + if swappedOffsets.contains(address) then + address = swappedOffsets(address) + m + ((address, address + global.size) -> DSN(Some(this), Some(DataRegion2(global.name, address, global.size)))) + } + externalFunctions.foreach( + external => + var address: BigInt = external.offset + if swappedOffsets.contains(address) then + address = swappedOffsets(address) + globalMapping.update((address, address), DSN(Some(this), Some(DataRegion2(external.name, address, 0)))) + ) + + + // determine if an address is a global and return the corresponding global if it is. 
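  // The lookup below is a linear scan over globalMapping, with ranges treated as
  // inclusive at both ends: given a (hypothetical) entry (0x2000, 0x2008) -> n,
  // isGlobal(0x2004) and isGlobal(0x2008) both return Some(n), while
  // isGlobal(0x1fff) falls through to None.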
+ def isGlobal(address: BigInt): Option[DSN] = + for (elem <- globalMapping) { + val range = elem._1 + if address >= range._1 && address <= range._2 then + return Some(elem._2) + } + None + + private def replaceInEV(oldCell: DSC, newCell: DSC) = + varToCell.foreach( + (pos, m) => + m.foreach( + (variable, cell) => + if cell.equals(oldCell) then + m.update(variable, newCell) + ) + ) + + private def replaceInPointTo(oldCell: DSC, newCell: DSC) = + pointTo.foreach { + case (pointer, pointee) => + if pointee.equals(oldCell) then + pointTo.update(pointer, newCell) + } + + def getPointee(cell: DSC): DSC = + if !pointTo.contains(cell) then + val node = DSN(None, None) + pointTo.update(cell, node.cells(0)) + pointTo(cell) + + +// private def earlyCollapse(node: DSN): Unit = +// node.collapsed = true +// node.cells.clear() +// +// node.addCell(0, 0) + + def collapseNode(node: DSN): Unit = + val collapedCell = DSC(Option(node), 0, true) + val e = DSC(None, 0) + + if node.id == 20 then + print("") + + + + val cell = node.cells.foldLeft(e) { + (c, field) => + + if pointTo.contains(field._2) && pointTo(field._2) == field._2 then + pointTo.update(field._2, collapedCell) + c + else if pointTo.contains(field._2) then + mergeCells(c, getPointee(field._2)) + else + c + } + + node.cells.values.foreach( + cell => + replaceInEV(cell, collapedCell) + replaceInPointTo(cell, collapedCell) + pointTo.foreach { + case (pointer, pointee) => + if pointer.equals(cell) then + pointTo.remove(pointer) + pointTo.update(collapedCell, pointee) + } + ) + + node.collapsed = true + + + node.cells.clear() + node.cells.addOne(0, collapedCell) + if cell.node.isDefined then + pointTo.update(node.cells(0), cell) + + + def mergeCells(cell1: DSC, cell2: DSC): DSC = + if (cell1 == cell2) { + return cell1 + } + if (incompatibleTypes(cell1, cell2)) then + collapseNode(cell2.node.get) + + if cell1.node.isDefined then + cell2.node.get.allocationRegions.addAll(cell1.node.get.allocationRegions) + + + if cell2.node.get.collapsed then + if cell1.node.isDefined then + cell1.node.get.cells.foreach { + case (offset, cell) => + if pointTo.contains(cell) then + if pointTo.contains(cell2.node.get.cells(0)) then + mergeCells(getPointee(cell), getPointee(cell2.node.get.cells(0))) + else + pointTo.update(cell2.node.get.cells(0), getPointee(cell)) + pointTo.remove(cell) + replaceInPointTo(cell, cell2.node.get.cells(0)) + replaceInEV(cell, cell2.node.get.cells(0)) + } + cell2.node.get.cells(0) + else + if pointTo.contains(cell1) then + if pointTo.contains(cell2.node.get.cells(0)) then + mergeCells(getPointee(cell1), getPointee(cell2.node.get.cells(0))) + else + pointTo.update(cell2.node.get.cells(0), getPointee(cell1)) + pointTo.remove(cell1) + replaceInPointTo(cell1, cell2.node.get.cells(0)) + replaceInEV(cell1, cell2.node.get.cells(0)) + cell2.node.get.cells(0) + else + cell1.node.get.cells.foreach { + case (offset, cell) => + if pointTo.contains(cell) then + if pointTo.contains(cell2.node.get.cells(offset)) then + mergeCells(getPointee(cell), getPointee(cell2.node.get.cells(offset))) + else + pointTo.update(cell2.node.get.cells(offset), getPointee(cell)) + pointTo.remove(cell) + replaceInPointTo(cell, cell2.node.get.cells(offset)) + replaceInEV(cell, cell2.node.get.cells(offset)) + } + cell2 + + + private def incompatibleTypes(cell1: DSC, cell2: DSC): Boolean = + if cell2.node.get.collapsed then + return false + else if cell1.node.isEmpty || (cell1.collapsedCell && !cell2.collapsedCell) then + return true // TODO not sure about this + else if 
cell1.offset != cell2.offset then + return true + else if cell1.node.get.cells.size != cell2.node.get.cells.size then + return true + else + (cell1.node.get.cells zip cell2.node.get.cells).foreach { //TODO remove unaccessed cells from type matching/allow unaccessed fields to merge with an accessed field + case ((o1, c1), (o2, c2)) => + if o1 != o2 || !c1.accessedSizes.equals(c2.accessedSizes) then + return true + } + false + + + private def isFormal(pos: CFGPosition, variable: Variable): Boolean = + variable != stackPointer && !reachingDefs(pos).contains(variable) + + val formals: mutable.Map[Variable, DSC] = mutable.Map() + val varToCell: Map[CFGPosition, mutable.Map[Variable, DSC]] = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).foldLeft(Map[CFGPosition, mutable.Map[Variable, DSC]]()) { + (m, pos) => + pos match + case LocalAssign(variable, value , label) => + if pos.asInstanceOf[LocalAssign].label.get.startsWith("%0000044f") then + print("") + value.variables.foreach( + v => + if isFormal(pos, v) then + val node = DSN(Some(this), None) + node.rep = "formal" + nodes.add(node) + formals.update(v, node.cells(0)) + ) + val node = DSN(Some(this), None) + node.rep = "ssa" + m + (pos -> mutable.Map(variable -> node.cells(0))) + case DirectCall(proc, target, label) if proc.name == "malloc" => + val node = DSN(Some(this), None) + node.rep = "ssa" + m + (pos -> mutable.Map(mallocRegister -> node.cells(0))) + case DirectCall(proc, target, label) if writesTo.contains(proc) => + val result: Map[Variable, DSC] = writesTo(proc).foldLeft(Map[Variable, DSC]()){ + (n, variable) => + val node = DSN(Some(this), None) + node.rep = "ssa" + n + (variable -> node.cells(0)) + } + m + (pos -> result.to(mutable.Map)) + case MemoryAssign(memory, MemoryStore(mem, index, value: Variable, endian, size), label) => + if isFormal(pos, value) then + val node = DSN(Some(this), None) + node.rep = "formal" + nodes.add(node) + formals.update(value, node.cells(0)) + m + case _ => m + } + + +// def coolMergeCells(cell1: DSC, cell2: DSC): DSC = +// val changedFormals = formals.foldLeft(Set[Variable]()) { +// (s, f) => +// if f._2 == cell1 then +// s + f._1 +// else +// s +// } +// +// val changedVars = varToCell.foldLeft(Map[CFGPosition, Set[Variable]]()) { +// +// } +// +// cell1 + + + + + + def addNode(memoryRegion2: MemoryRegion2, offset: BigInt, size: Int): DSN = ??? 
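  // A minimal standalone sketch of the formal-parameter rule used by varToCell
  // above; the names and types here (Pos, Var, uses) are simplified stand-ins,
  // not the IR types of this codebase. A variable read at a position with no
  // reaching definition, other than the stack pointer, is treated as a formal.
  object FormalDetectionSketch {
    type Var = String
    type Pos = Int

    def formalsAt(pos: Pos,
                  uses: Map[Pos, Set[Var]],
                  reachingDefs: Map[Pos, Map[Var, Set[Pos]]],
                  stackPointer: Var = "R31"): Set[Var] =
      uses.getOrElse(pos, Set.empty[Var]).filter { v =>
        v != stackPointer && !reachingDefs.getOrElse(pos, Map.empty[Var, Set[Pos]]).contains(v)
      }

    def main(args: Array[String]): Unit = {
      val uses = Map(0 -> Set("R0", "R31"), 1 -> Set("R0"))
      val defs = Map(1 -> Map("R0" -> Set(0))) // R0 defined at position 0 reaches position 1
      println(formalsAt(0, uses, defs)) // Set(R0): read with no reaching definition
      println(formalsAt(1, uses, defs)) // Set(): R0 now has a definition
    }
  }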
+// if nodes.contains(memoryRegion2) then +// nodes(memoryRegion2).addCell(offset, size) +// else +// val node = DSN(Some(this), Some(memoryRegion2)) +// nodes.update(memoryRegion2, node) +// node.addCell(offset, size) +// nodes(memoryRegion2) +} + +class DSN(val graph: Option[DSG], var region: Option[MemoryRegion2]) { + + val id: Int = NodeCounter.getCounter + + var collapsed = false + + val allocationRegions: mutable.Set[MemoryRegion2] = region match + case Some(value) => mutable.Set(value) + case None => mutable.Set() + + var rep: String = "" + + var size: BigInt = region match + case Some(value) => value match + case DataRegion2(regionIdentifier, start, size) => size + case HeapRegion2(regionIdentifier, proc, size) => size + case StackRegion2(regionIdentifier, proc, size) => size + case UnknownRegion2(regionIdentifier, proc) => 0 + case None => 0 + + val cells: mutable.Map[BigInt, DSC] = mutable.Map() + this.addCell(0, 0) + + def updateSize(newSize: BigInt): Unit = + + if newSize > size then + size = newSize + def addCell(offset: BigInt, size: Int) = + this.updateSize(offset + size) + if !cells.contains(offset) then + val cell = DSC(Some(this), offset) + cells.update(offset, cell) + cell.addAccessedSize(size) + else + cells(offset).addAccessedSize(size) + if cells(offset).accessedSizes.size > 1 then + graph.get.collapseNode(this) + + + override def equals(obj: Any): Boolean = + obj match + case node: DSN => + this.id == node.id + case _ => false + + override def toString: String = s"Node($id, $allocationRegions ${if collapsed then ", collapsed" else ""})" +} + +case class DSC(node: Option[DSN], offset: BigInt, collapsedCell: Boolean = false) +{ + val accessedSizes: mutable.Set[Int] = mutable.Set() + def addAccessedSize(size: Int): Unit = + if size != 0 then accessedSizes.add(size) + + + override def toString: String = s"Cell(${if node.isDefined then node.get.toString else "NONE"}, $offset)" +} + +class SimulationMapper +{ + +} + +class Field {} + + +class Offset +{} + +class Alloc +{} + +class CallSite +{ + +} + diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala new file mode 100644 index 000000000..7a887b58e --- /dev/null +++ b/src/main/scala/analysis/Local.scala @@ -0,0 +1,276 @@ +package analysis + +import ir.{BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, IntraProcIRCursor, Literal, LocalAssign, Memory, MemoryAssign, MemoryLoad, MemoryStore, Procedure, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, computeDomain, toShortString} +import specification.{ExternalFunction, SpecGlobal} + +import scala.util.control.Breaks.{break, breakable} +import java.math.BigInteger + +class Local( + proc: Procedure, + symResults: Map[CFGPosition, Map[SymbolicAccess, TwoElement]], + constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + globals: Set[SpecGlobal], globalOffsets: Map[BigInt, BigInt], + externalFunctions: Set[ExternalFunction], + reachingDefs: Map[CFGPosition, Map[Variable, Set[CFGPosition]]], + writesTo: Map[Procedure, Set[Register]] + ) extends Analysis[Any]{ + + val bitvecnegative: BigInt = new BigInt(new BigInteger("9223372036854775808")) + val mallocRegister = Register("R0", BitVecType(64)) + val stackPointer = Register("R31", BitVecType(64)) + + + val varToSym: Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]] = symResults.foldLeft(Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]]()) { + (outerMap, syms) => + val position = syms._1 + val innerMap = 
syms._2.foldLeft(Map[Variable, Set[SymbolicAccess]]()) { + (m, access) => + val b = position + if (m.contains(access._1.accessor)) then + m + (access._1.accessor -> (m(access._1.accessor) + access._1)) + else + m + (access._1.accessor -> Set(access._1)) + } + + outerMap + (position -> innerMap) + } + + def decToBinary(n: BigInt): Array[Int] = { + val binaryNum: Array[Int] = new Array[Int](64) + var i = 0 + var num = n + while (num > 0) { + binaryNum(i) = (num % BigInt(2)).intValue + num = num / 2 + i += 1 + } + binaryNum + } + + def twosComplementToDec(binary: Array[Int]): BigInt = { + var result: BigInt = BigInt(0) + var counter: Int = 0 + binary.foreach( + n => + if counter == binary.length - 1 && n == 1 then + result = result - BigInt(2).pow(counter) + else if n == 1 then + result = result + BigInt(2).pow(counter) + counter += 1 + ) + result + } + + var mallocCount: Int = 0 + + private def nextMallocCount = { + mallocCount += 1 + s"malloc_$mallocCount" + } + + val graph = DSG(proc, constProp, globals, globalOffsets, externalFunctions, reachingDefs, writesTo) + + + def isGlobal(expr: Expr, pos: CFGPosition, size: Int = 0): Option[DSC] = + if evaluateExpression(expr, constProp(pos)).isDefined && graph.isGlobal(evaluateExpression(expr, constProp(pos)).get.value).isDefined then + val address = evaluateExpression(expr, constProp(pos)).get.value + val node: DSN = graph.isGlobal(evaluateExpression(expr, constProp(pos)).get.value).get + val baseAddress = node.region.get.asInstanceOf[DataRegion2].start + val offset = address - baseAddress + node.addCell(offset, size) + if node.collapsed then + Some(node.cells(0)) + else + Some(node.cells(offset)) + else + None + + def getCells(pos: CFGPosition, arg: Variable): Set[DSC] = + if reachingDefs(pos).contains(arg) then + reachingDefs(pos)(arg).foldLeft(Set[DSC]()) { + (s, defintion) => + s + graph.varToCell(defintion)(arg) + } + else + Set(graph.formals(arg)) + + def getNodes(pos: CFGPosition, arg: Variable): Set[DSN] = + if reachingDefs(pos).contains(arg) then + reachingDefs(pos)(arg).foldLeft(Set[DSN]()){ + (s, defintion) => + s + graph.varToCell(defintion)(arg).node.get + } + else + Set(graph.formals(arg).node.get) + + def visit(n: CFGPosition): Unit = { + n match + case DirectCall(proc, target, label) if proc.name == "malloc" => + val size: BigInt = evaluateExpression(mallocRegister, constProp(n)) match + case Some(value) => value.value + case None => 0 + val node = DSN(Some(graph), Some(HeapRegion2(nextMallocCount, proc, size))) + graph.nodes.add(node) + val cell = graph.mergeCells(graph.varToCell(n)(mallocRegister), node.cells(0)) + graph.varToCell(n).update(mallocRegister, cell) + + case LocalAssign(variable, expr, maybeString) => + val lhsCell = graph.varToCell(n)(variable) + if isGlobal(expr, n).isDefined then + val global = isGlobal(expr, n).get + val result = graph.mergeCells(lhsCell, global) + graph.varToCell(n).update(variable, result) + else + expr match + case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) && arg1.equals(stackPointer) + && evaluateExpression(arg2, constProp(n)).isDefined && evaluateExpression(arg2, constProp(n)).get.value >= bitvecnegative => + val size = twosComplementToDec(decToBinary(evaluateExpression(arg2, constProp(n)).get.value)) + val node = DSN(Some(graph), Some(StackRegion2("Stack_"+proc.name, proc, -size))) + graph.nodes.add(node) + val cell = graph.mergeCells(lhsCell, node.cells(0)) + graph.varToCell(n).update(variable, cell) + case BinaryExpr(op, arg1: Variable, arg2) if varToSym(n).contains(arg1) 
&& evaluateExpression(arg2, constProp(n)).isDefined => + val offset = evaluateExpression(arg2, constProp(n)).get.value + val nodes: Set[DSN] = getNodes(n, arg1) + nodes.foreach(_.addCell(offset, 0)) + val cell = nodes.foldLeft(lhsCell){ + (c, node) => + var field = offset + node.addCell(offset, 0) + if node.collapsed then + field = 0 + graph.mergeCells(c, node.cells(field)) // TODO this causing everything to collapse + } + graph.varToCell(n).update(variable, cell) + + case arg: Variable if varToSym(n).contains(arg) => + val cells = getCells(n, arg) + + val cell = cells.foldLeft(lhsCell){ + (c, p) => + graph.mergeCells(c, p) // TODO this causing everything to collapse + } + graph.varToCell(n).update(variable, cell) + + case MemoryLoad(mem, index, endian, size) => + val byteSize = (size.toDouble/8).ceil.toInt + if isGlobal(index, n, byteSize).isDefined then + val global = isGlobal(index, n, byteSize).get + val result = graph.mergeCells(lhsCell, graph.getPointee(global)) + graph.varToCell(n).update(variable, result) + else + index match + case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => + assert(varToSym(n).contains(arg1)) + val offset = evaluateExpression(arg2, constProp(n)).get.value + val nodes: Set[DSN] = getNodes(n, arg1) + nodes.foreach(_.addCell(offset, byteSize)) + val cell = nodes.foldLeft(lhsCell){ + (c, node) => + var field = offset + node.addCell(offset, byteSize) + if node.collapsed then + field = 0 + graph.mergeCells(c, graph.getPointee(node.cells(field))) // TODO this causing everything to collapse + } + graph.varToCell(n).update(variable, cell) + case arg: Variable => + assert(varToSym(n).contains(arg)) + val cells: Set[DSC] = getCells(n, arg) + + val cell = cells.foldLeft(lhsCell){ + (c, p) => + graph.mergeCells(c, graph.getPointee(p)) // TODO this causing everything to collapse + } + graph.varToCell(n).update(variable, cell) + case _ => ??? + case _ => + breakable { + var containsPointer = false + for (v <- expr.variables) { + if varToSym(n).contains(v) then + containsPointer = true + break + } + if containsPointer then + val cell = expr.variables.foldLeft(lhsCell) { + (c, v) => + val cells: Set[DSC] = getCells(n, v) + + cells.foldLeft(c) { + (c, p) => + graph.mergeCells(c, p) // TODO this causing everything to collapse + } + } + val node = cell.node.get + graph.collapseNode(node) + graph.varToCell(n).update(variable, node.cells(0)) + } + case MemoryAssign(memory, MemoryStore(mem, index, value: Variable, endian, size), label) => + val byteSize = (size.toDouble/8).ceil.toInt + val addressCell: DSC = + if isGlobal(index, n, byteSize).isDefined then + isGlobal(index, n, byteSize).get + else + index match + case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => + assert(varToSym(n).contains(arg1)) + val offset = evaluateExpression(arg2, constProp(n)).get.value + val nodes: Set[DSN] = getNodes(n, arg1) + nodes.foreach(_.addCell(offset, byteSize)) + val cell = nodes.foldLeft(DSN(Some(graph), None).cells(0)) { + (c, node) => + var field = offset + node.addCell(offset, byteSize) + if node.collapsed then + field = 0 + graph.mergeCells(c, node.cells(field)) // TODO this causing everything to collapse + } + cell + case arg: Variable => + assert(varToSym(n).contains(arg)) + val cells: Set[DSC] = getCells(n, arg) + val cell = cells.foldLeft(DSN(Some(graph), None).cells(0)) { + (c, p) => + graph.mergeCells(c, p) // TODO this causing everything to collapse + } + cell + case _ => ??? 
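        // Store rule (*p = q): every cell the stored value may correspond to is
        // merged into the pointee of the address cell computed above, and the
        // merged cell is written back to the value's defining assignments (or to
        // the formals map when the value has no reaching definition here).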
+ + val valueCells = getCells(n, value) + val result = valueCells.foldLeft(graph.getPointee(addressCell)) { + (c, p) => + graph.mergeCells(p, c) + } + + if reachingDefs(n).contains(value) then + reachingDefs(n)(value).foreach ( + definition => + graph.varToCell(definition).update(value, result) + ) + else + graph.formals.update(value, result) + + case _ => + } + def analyze(): Any = + val domain = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).reverse + +// println(domain) + domain.foreach(visit) + + + println(graph.formals) + val results = graph.varToCell.keys.toSeq.sortBy(_.toShortString) + results.foreach { + pos => + println(pos) + val tab = " " + graph.varToCell(pos).foreach { + case (variable, cell) => + println(tab + variable.toString + " -> " + cell.toString) + } + } + println(graph.pointTo) +} diff --git a/src/main/scala/analysis/LocalDSA.scala b/src/main/scala/analysis/LocalDSA.scala new file mode 100644 index 000000000..22ee1e48f --- /dev/null +++ b/src/main/scala/analysis/LocalDSA.scala @@ -0,0 +1,131 @@ +//package analysis +// +//import ir.{Assert, Assume, BVADD, BinOp, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, Call, DirectCall, Expr, Extract, GoTo, IndirectCall, IntraProcIRCursor, Literal, LocalAssign, LocalVar, Memory, MemoryAssign, MemoryLoad, MemoryStore, NOP, Procedure, Program, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, computeDomain} +// +//import java.math.BigInteger +//import scala.annotation.{static, tailrec} +// +// +//class LocalDSA(val program: Program, val procedure: Procedure, constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], var symbolicAccesses: Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]]) extends Analysis[Any] { +// val graph: Graph = Graph(procedure) +// +// +// private val stackPointer = Register("R31", BitVecType(64)) +// private val linkRegister = Register("R30", BitVecType(64)) +// private val framePointer = Register("R29", BitVecType(64)) +// +// val bitvecnegative: BigInt = new BigInt(new BigInteger("9223372036854775808")) +// +// +// private val ignoreRegions: Set[Expr] = Set(linkRegister, framePointer) +// +// val malloc_register = Register("R0", BitVecType(64)) +// private var localVarCount: Int = -1 +// private def getNextLocalVarName: String = { +// localVarCount += 1 +// s"NormVar_$localVarCount" +// } +// +//// for (i <- 0 to 31) { +//// graph.pointersToCells.update(Register(s"R$i", BitVecType(64)), graph.makeCell()) +//// } +// +// def visitBinaryLocalAssign(lhs: Variable, op: BinOp, arg1: Variable, offset: BigInt) = { +// val cell = graph.getVariablePointee(arg1) +// val node = cell.node.get +// if node.isCollapsed then +// graph.collapsePointer(lhs) // TODO ensure passing right memory region here +// else if !node.isSeq /* && offset == 0 */ then // TODO here we are making everything with a offset a sequence +// val size = cell.offset + offset + 8 // assuming bitvector of 64, all the fields that matter are pointers +// node.updateSize(size) +// graph.unify(lhs, node.cell(cell.offset + offset)) +// else +// node.setSeq() +// val size = node.size.gcd(cell.offset) +// node.updateSize(size) +// graph.unify(lhs, cell) +// } +// +// def atomicPointer(n: CFGPosition) : Unit = { +// n match +// case DirectCall(target: Procedure, returnTarget, label) if procedure.name.equals("malloc") => +// val cell = graph.makeCell(Some(symbolicAccesses(n)(malloc_register).head.symbolicBase)) +// graph.unify(malloc_register, cell) +// // case _ => // TODO ignoring all other 
calls right now. Think about semantics of a call +// // should unify returns +// case LocalAssign(variable, expr, maybeString) => +// expr match +// case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) && arg1.equals(stackPointer) +// && evaluateExpression(arg2, constantProp(n)).isDefined && evaluateExpression(arg2, constantProp(n)).get.value >= bitvecnegative => +// // p = &x +// val node = graph.makeNode(Some(symbolicAccesses(n)(variable).head.symbolicBase)) +// val cell = node.cell() +// graph.unify(variable, cell) +//// case BinaryExpr(op, arg1: Variable, arg2) if symbolicAccesses(n).contains(arg1) && evaluateExpression(arg2, constantProp(n)).isDefined => // what TODO if can't evaluate arg2 +//// // variable = arg1 + (c = 0 * m) + arg2 +//// val offset: BigInt = evaluateExpression(arg2, constantProp(n)).get.value +//// visitBinaryLocalAssign(variable, op, arg1, offset) +//// case vari: Variable if symbolicAccesses(n).contains(vari) => // TODO actually check if q is a pointer +//// // p = q +//// val cell = graph.getVariablePointee(vari) +//// val node = cell.node.get +//// if node.isCollapsed then +//// graph.collapsePointer(variable) // TODO ensure passing right memory region here +//// else if !node.isSeq then +//// val size = cell.offset + 8 // assume all sizes are the same for now since we don't know sizes of everything +//// node.updateSize(size) +//// graph.unify(variable, cell) +//// else +//// node.setSeq() +//// graph.unify(variable, cell) // c is zero here +// case MemoryLoad(mem, index, endian, size) => +// // q = *p +// index match +// case BinaryExpr(op, arg1: Variable, arg2) if symbolicAccesses(n).contains(arg1) && evaluateExpression(arg2, constantProp(n)).isDefined => +// val local = symbolicAccesses(n).keySet.reduce( +// (v1, v2) => +// if v1.name.startsWith("NormVar") then +// v1 +// else if v2.name.startsWith("NormVar") then +// v2 +// else +// v1 +// ) +// assert(local.name.startsWith("NormVar")) +// visitBinaryLocalAssign(local, op, arg1, evaluateExpression(arg2, constantProp(n)).get.value) +// graph.getCellPointee(graph.getVariablePointee(local)).unify(graph.getVariablePointee(variable)) +// case vari: Variable if symbolicAccesses(n).contains(vari) => +// graph.getCellPointee(graph.getVariablePointee(vari)).unify(graph.getVariablePointee(variable)) +// case _ => +// case _ => +// case MemoryAssign(me, MemoryStore(mem, index, value, endian, size), label) => +// //*p = q +// index match +// case BinaryExpr(op, arg1: Variable, arg2) if symbolicAccesses(n).contains(arg1) => +// val local = symbolicAccesses(n).keySet.reduce( +// (v1, v2) => +// if v1.name.startsWith("NormVar") then +// v1 +// else if v2.name.startsWith("NormVar") then +// v2 +// else +// v1 +// ) +// assert(local.name.startsWith("NormVar")) +// visitBinaryLocalAssign(local, op, arg1, evaluateExpression(arg2, constantProp(n)).get.value) +// graph.getCellPointee(graph.getVariablePointee(local)). 
+// unify(graph.getVariablePointee(value.variables.head)) +// case vari: Variable if symbolicAccesses(n).contains(vari) => +// graph.getCellPointee(graph.getVariablePointee(vari)).unify(graph.getVariablePointee(value.variables.head)) +// case _ => +// case _ => +// } +// +// def analyze(): Any = { +// computeDomain(IntraProcIRCursor, Set(procedure)).foreach(atomicPointer) +// +//// println(graph.pointersToCells) +//// println(graph.pointsToRelations) +//// println(graph.nodes) +// } +//} diff --git a/src/main/scala/analysis/PointerTypeAnalysis.scala b/src/main/scala/analysis/PointerTypeAnalysis.scala new file mode 100644 index 000000000..e8aed8504 --- /dev/null +++ b/src/main/scala/analysis/PointerTypeAnalysis.scala @@ -0,0 +1,83 @@ +package analysis + +import analysis.solvers.{BackwardIDESolver, ForwardIDESolver} +import ir.{BinaryExpr, BitVecType, CFGPosition, DirectCall, Extract, GoTo, IRWalk, IndirectCall, Literal, LocalAssign, Memory, MemoryAssign, MemoryLoad, MemoryStore, Procedure, Program, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, end} + +import scala.collection.immutable.Map +import scala.collection.mutable + +trait PointerTypeFunctions extends BackwardIDEAnalysis[Variable, TwoElement, TwoElementLattice] { + + val valuelattice: TwoElementLattice = TwoElementLattice() + val edgelattice: EdgeFunctionLattice[TwoElement, TwoElementLattice] = EdgeFunctionLattice(valuelattice) + import edgelattice.{IdEdge, ConstEdge} + + def edgesOther(n: CFGPosition)(d: DL): Map[DL, EdgeFunction[TwoElement]] = { + n match + case s: CFGPosition if end(program.mainProcedure).equals(s) => + d match + case Left(value) => Map(d -> IdEdge()) + case Right(_) => Map(d -> IdEdge(), Left(Register("R31", BitVecType(64))) -> ConstEdge(TwoElementTop)) + + case LocalAssign(lhs, expr, _) => + expr match + case BinaryExpr(op, arg1, arg2) => + d match + case Left(value) if value == arg1 => Map(Left(lhs) -> IdEdge()) + case Left(value) if value == lhs => Map() + case _ => Map(d -> IdEdge()) + case rhs: Variable => + d match + case Left(value) if value == rhs => Map(Left(lhs) -> IdEdge()) + case Left(value) if value == lhs => Map() + case _ => Map(d -> IdEdge()) + case MemoryLoad(mem, index, endian, size) => + index match + case BinaryExpr(op, arg1: Variable, arg2) => + d match + case Left(value) if value == lhs || value == arg1 => Map() + case Left(value) => Map(d -> IdEdge()) + case Right(_) => Map(d -> IdEdge(), Left(arg1) -> ConstEdge(TwoElementTop)) + case variable: Variable => + d match + case Left(value) if value == lhs || value == variable => Map() + case Left(value) => Map(d -> IdEdge()) + case Right(_) => Map(d -> IdEdge(), Left(variable) -> ConstEdge(TwoElementTop)) + case _ => + d match + case Left(value) if value == lhs => Map() + case Left(value) => Map(d -> IdEdge()) + case Right(_) => Map(d -> IdEdge()) + case _ => + d match + case Left(value) if value == lhs => Map() + case Left(value) => Map(d -> IdEdge()) + case Right(_) => Map(d -> IdEdge()) + case MemoryAssign(mem, MemoryStore(mem2, index, value, endian, size), _) => + index match + case BinaryExpr(op, arg1: Variable, arg2) => + d match + case Left(value) if value != arg1 => Map(d -> IdEdge()) + case Left(value) if value == arg1 => Map() + case Right (_) => Map(d -> IdEdge(), Left(arg1) -> ConstEdge(TwoElementTop)) + case variable: Variable => + d match + case Left(value) if value != variable => Map(d -> IdEdge()) + case Left(value) if value == variable => Map() + case Right (_) => Map(d -> IdEdge(), Left(variable) -> 
ConstEdge(TwoElementTop)) + case _ => Map(d -> IdEdge()) + case _ => Map(d -> IdEdge()) + } + + def edgesCallToEntry(call: GoTo, entry: IndirectCall)(d: DL): Map[DL, EdgeFunction[TwoElement]] = Map(d -> IdEdge()) + + def edgesExitToAfterCall(exit: Procedure, aftercall: DirectCall)(d: DL): Map[DL, EdgeFunction[TwoElement]] = Map(d -> IdEdge()) + + def edgesCallToAfterCall(call: GoTo, aftercall: DirectCall)(d: DL): Map[DL, EdgeFunction[TwoElement]] = + d match + case Left(value) => Map() // maps all variables before the call to bottom + case Right(_) => Map(d -> IdEdge()) +} + +class PointerTypeAnalysis(program: Program) + extends BackwardIDESolver[Variable, TwoElement, TwoElementLattice](program), PointerTypeFunctions diff --git a/src/main/scala/analysis/PrePass.scala b/src/main/scala/analysis/PrePass.scala new file mode 100644 index 000000000..2328f919b --- /dev/null +++ b/src/main/scala/analysis/PrePass.scala @@ -0,0 +1,184 @@ +//package analysis +// +//import analysis.solvers.{SimplePushDownWorklistFixpointSolver, SimpleWorklistFixpointSolver} +//import ir.IRWalk.procedure +//import ir.{Assert, Assume, BVADD, BVAND, BVASHR, BVBinOp, BVCOMP, BVCONCAT, BVEQ, BVLSHR, BVMUL, BVNAND, BVNEQ, BVNOR, BVOR, BVSDIV, BVSGE, BVSGT, BVSHL, BVSLE, BVSLT, BVSMOD, BVSREM, BVSUB, BVUDIV, BVUGE, BVUGT, BVULE, BVULT, BVUREM, BVXNOR, BVXOR, BinOp, BinaryExpr, BitVecLiteral, BitVecType, BoolBinOp, CFGPosition, Call, DirectCall, Expr, Extract, GoTo, IntBinOp, IntraProcIRCursor, Literal, LocalAssign, LocalVar, Memory, MemoryAssign, MemoryLoad, MemoryStore, NOP, Procedure, Program, Register, Repeat, SignExtend, Statement, UnaryExpr, Variable, ZeroExtend, computeDomain} +//import specification.SpecGlobal +//import util.Logger +// +//import java.math.BigInteger +//import scala.collection.mutable +//import scala.math.BigInt +// +//case class SymbolicAccess(symbolicBase: MemoryRegion2, offset: BigInt) { +// override def toString: String = s"SymbolicAccess($symbolicBase, $offset)" +//} +// +//class PrePass(program: Program, +// constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], globals: Set[SpecGlobal], globalAddresses: Map[BigInt, String], globalOffsets: Map[BigInt, BigInt]) +// +// extends Analysis[Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]]], +// IRIntraproceduralForwardDependencies, +// SimpleWorklistFixpointSolver[CFGPosition, Map[Variable, Set[SymbolicAccess]], +// MapLattice[Variable, Set[SymbolicAccess], PowersetLattice[SymbolicAccess]]] { +// +// val domain: Set[CFGPosition] = computeDomain(IntraProcIRCursor, program.procedures).toSet +// val lattice: MapLattice[CFGPosition, Map[Variable, Set[SymbolicAccess]], +// MapLattice[Variable, Set[SymbolicAccess], PowersetLattice[SymbolicAccess]]] = MapLattice(MapLattice(PowersetLattice())) +// +// var mallocCount: Int +// = 0 +// private var stackCount: Int = 0 +// +// private def nextMallocCount = { +// mallocCount += 1 +// s"malloc_$mallocCount" +// } +// +// private def nextStackCount = { +// stackCount += 1 +// s"stack_$stackCount" +// } +// +// private var localVarCount: Int = -1 +// +// private def getNextLocalVarName: String = { +// localVarCount += 1 +// s"NormVar_$localVarCount" +// } +// +// +// private val stackPointer = Register("R31", BitVecType(64)) +// private val linkRegister = Register("R30", BitVecType(64)) +// private val framePointer = Register("R29", BitVecType(64)) +// private val mallocVariable = Register("R0", BitVecType(64)) +// +// def updateOffsets(variable: Variable, arg: Variable, offsetChange:BigInt, op: 
BinOp, s: Map[Variable, Set[SymbolicAccess]]): Map[Variable, Set[SymbolicAccess]] = { +// val newSyms: mutable.Set[SymbolicAccess] = mutable.Set() +// s(arg).foreach( +// sym => +// op match +// case BVADD => newSyms.add(SymbolicAccess(sym.symbolicBase, sym.offset + offsetChange)) +// case BVSUB => newSyms.add(SymbolicAccess(sym.symbolicBase, sym.offset - offsetChange)) +// case _ => ???// check if this happens often +// ) +// s + (variable -> newSyms.toSet) +// } +// +// +// def decToBinary(n: BigInt): Array[Int] = { +// val binaryNum: Array[Int] = new Array[Int](64) +// var i = 0 +// var num = n +// while (num > 0) { +// binaryNum(i) = (num % BigInt(2)).intValue +// num = num / 2 +// i += 1 +// } +// binaryNum +// } +// +// def twosComplementToDec(binary: Array[Int]): BigInt = { +// var result: BigInt = BigInt(0) +// var counter: Int = 0 +// binary.foreach( +// n => +// if counter == binary.length - 1 && n == 1 then +// result = result - BigInt(2).pow(counter) +// else if n == 1 then +// result = result + BigInt(2).pow(counter) +// counter += 1 +// ) +// result +// } +// +// def transfer(n: CFGPosition, symbolicAccesses: Map[Variable, Set[SymbolicAccess]]): Map[Variable, Set[SymbolicAccess]] = { +// +// val bitvecnegative: BigInt = new BigInt(new BigInteger("9223372036854775808")) //"18446744073709551615" +// +// val s = symbolicAccesses.filter((v, se) => +// !v.name.startsWith("NormVar") +// ) +// +// n match +// case LocalAssign(variable, expr, maybeString) => +// expr match +// case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) && arg1.equals(stackPointer) +// && evaluateExpression(arg2, constProp(n)).isDefined && evaluateExpression(arg2, constProp(n)).get.value >= bitvecnegative => +// val size = twosComplementToDec(decToBinary(evaluateExpression(arg2, constProp(n)).get.value)) +// s + (variable -> Set(SymbolicAccess(StackRegion2(s"Stack_${procedure(n).name}", procedure(n), BitVecLiteral(-size.intValue, 64)), 0))) +// case BinaryExpr(op, arg1: Variable, arg2) if s.contains(arg1) => // arg1 is a symbolic access variable +// evaluateExpression(arg2, constProp(n)) match +// case Some(value) => // arg2 is some constant +// updateOffsets(variable, arg1, value.value, op, s) +// case None => // couldn't evaluate R2 to a constant +// arg2 match +// case vari:Variable if s.contains(vari) => +// evaluateExpression(arg1, constProp(n)) match +// case Some(value) => +// updateOffsets(variable, vari, value.value, op, s) +// case None => s - variable +// case _ => +// s - variable +// case BinaryExpr(op, arg1, arg2: Variable) if s.contains(arg2) => +// evaluateExpression(arg1, constProp(n)) match +// case Some(value) => // arg1 is some constant +// updateOffsets(variable, arg2, value.value, op, s) +// case None => s - variable // couldn't evaluate R1 to a constant +// case vari: Variable if s.contains(vari) => +// s + (variable -> s(vari)) +// case MemoryLoad(mem, index, endian, size) => +// index match +// case BinaryExpr(op, arg1: Variable, arg2) if s.contains(arg1) && evaluateExpression(arg2, constProp(n)).isDefined => +// val local = LocalVar(getNextLocalVarName, BitVecType(64)) +// updateOffsets(local, arg1, evaluateExpression(arg2, constProp(n)).get.value, op, s) +// case _ => s +// case _ if s.contains(variable) => +// s - variable +// case _ => s +// case directCall: DirectCall if directCall.target.name == "malloc" => +// nextMallocCount +// evaluateExpression(mallocVariable, constProp(n)) match +// case Some(value) => +// s + (mallocVariable -> 
Set(SymbolicAccess(HeapRegion2(s"Malloc-${mallocCount}", value), 0))) +// case None => +// s + (mallocVariable -> Set(SymbolicAccess(HeapRegion2(s"Malloc-${mallocCount}", BitVecLiteral(-1, 64)), 0))) +// case MemoryAssign(mem, MemoryStore(m, index, value, endian, size), label) => +// index match +// case BinaryExpr(op, arg1: Variable, arg2) if s.contains(arg1) && evaluateExpression(arg2, constProp(n)).isDefined => +// val local = LocalVar(getNextLocalVarName, BitVecType(64)) +// updateOffsets(local, arg1, evaluateExpression(arg2, constProp(n)).get.value, op, s) +// case _ => s +// case _ => s +// } +// +// +//// override def analyze() = { +//// val results = super.analyze() +//// var offsetMapping +//// results.foreach( +//// +//// ) +//// results +//// } +//} +// +//trait MemoryRegion2 { +// val regionIdentifier: String +//} +// +//case class StackRegion2(override val regionIdentifier: String, parent: Procedure, size: BitVecLiteral) extends MemoryRegion2 { +// override def toString: String = s"Stack($regionIdentifier, ${parent.name}, $size)" +//// ${if symbolicAccess.isDefined then s", ${symbolicAccess.get}" else ""} +//} +// +//case class HeapRegion2(override val regionIdentifier: String, size: BitVecLiteral) extends MemoryRegion2 { +// override def toString: String = s"Heap($regionIdentifier, $size)" +//} +// +//case class DataRegion2(override val regionIdentifier: String, start: BitVecLiteral) extends MemoryRegion2 { +// override def toString: String = s"Data($regionIdentifier, $start)" +//} +// +// +// diff --git a/src/main/scala/analysis/ReachingDefs.scala b/src/main/scala/analysis/ReachingDefs.scala new file mode 100644 index 000000000..503b1f78d --- /dev/null +++ b/src/main/scala/analysis/ReachingDefs.scala @@ -0,0 +1,30 @@ +package analysis + +import analysis.solvers.SimpleWorklistFixpointSolver +import ir.{Assert, Assume, BitVecType, CFGPosition, Call, DirectCall, Expr, GoTo, IndirectCall, InterProcIRCursor, IntraProcIRCursor, LocalAssign, MemoryAssign, NOP, Procedure, Program, Register, Variable, computeDomain} + +abstract class ReachingDefs(program: Program, writesTo: Map[Procedure, Set[Register]]) extends Analysis[Map[CFGPosition, Map[Variable, Set[CFGPosition]]]] { + + val mallocRegister = Register("R0", BitVecType(64)) + val domain: Set[CFGPosition] = computeDomain(IntraProcIRCursor, program.procedures).toSet + val lattice: MapLattice[CFGPosition, Map[Variable, Set[CFGPosition]], MapLattice[Variable, Set[CFGPosition], PowersetLattice[CFGPosition]]] = new MapLattice(new MapLattice(new PowersetLattice[CFGPosition]())) + + def transfer(n: CFGPosition, s: Map[Variable, Set[CFGPosition]]): Map[Variable, Set[CFGPosition]] = + n match + case loc:LocalAssign => + s + (loc.lhs -> Set(n)) + case DirectCall(proc, target, label) if proc.name == "malloc" => + s + (mallocRegister -> Set(n)) + case DirectCall(proc, target, label) if writesTo.contains(proc) => + val result: Map[Variable, Set[CFGPosition]] = writesTo(proc).foldLeft(Map[Variable, Set[CFGPosition]]()){ + (m, register) => + m + (register -> Set(n)) + } + s ++ result + case _ => s + +} + +class ReachingDefsAnalysis(program: Program, writesTo: Map[Procedure, Set[Register]]) extends ReachingDefs(program, writesTo), IRInterproceduralForwardDependencies, + SimpleWorklistFixpointSolver[CFGPosition, Map[Variable, Set[CFGPosition]], MapLattice[Variable, Set[CFGPosition], PowersetLattice[CFGPosition]]] + diff --git a/src/main/scala/analysis/RegionBuilder.scala b/src/main/scala/analysis/RegionBuilder.scala new file mode 100644 index 
000000000..c752c1559 --- /dev/null +++ b/src/main/scala/analysis/RegionBuilder.scala @@ -0,0 +1,336 @@ +//package analysis +// +//import analysis.solvers.UnionFindSolver +//import ir.{BinaryExpr, BitVecLiteral, CFGPosition, Expr, Extract, InterProcIRCursor, IntraProcIRCursor, Literal, LocalAssign, Memory, MemoryAssign, MemoryLoad, MemoryStore, Procedure, Program, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, computeDomain} +//import specification.{ExternalFunction, SpecGlobal} +// +//import scala.collection.mutable +//import scala.util.boundary, boundary.break +// +//class RegionBuilder(program: Program, symResults: Map[CFGPosition, Map[SymbolicAccess, TwoElement]], +// constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], +// globals: Set[SpecGlobal], globalOffsets: Map[BigInt, BigInt], +// externalFunctions: Set[ExternalFunction]) extends Analysis[Any] { +// +// val graphs: mutable.Map[Procedure, DSG] = mutable.Map() +// val nodes: mutable.Map[MemoryRegion2, DSN] = mutable.Map() +// val solver: UnionFindSolver[StTerm] = UnionFindSolver() +// val loadStore: mutable.Set[CFGPosition] = mutable.Set() +// val pointTo: mutable.Map[DSC, DSC] = mutable.Map() +// +// +// private def replaceInPointTo(oldCell: DSC, newCell:DSC) = +// pointTo.foreach{ +// case (pointer, pointee) => +// if pointee.equals(oldCell) then +// pointTo.update(pointer, newCell) +// } +// +// private def getPointee(cell: DSC): DSC = +// if !pointTo.contains(cell) then +// val node = DSN(None, None) +// pointTo.update(cell, node.cells(0)) +// pointTo(cell) +// +// +// +// private def earlyCollapse(node: DSN) : Unit = +// node.collapsed = true +// node.cells.clear() +// node.addCell(0, 0) +// +// private def collapseNode(node: DSN): Unit = +// val e = DSC(None, 0) +// val cell = node.cells.foldLeft(e){ +// (c, field) => mergeCells(c, getPointee(field._2)) +// } +// earlyCollapse(node) +// pointTo.update(node.cells(0), cell) +// +// +// private def mergeCells(cell1: DSC, cell2: DSC): DSC = +// if (incompatibleTypes(cell1, cell2)) then +// collapseNode(cell2.node.get) +// +// if cell2.node.get.region.isEmpty && cell1.node.isDefined then +// cell2.node.get.region = cell1.node.get.region +// +// if cell2.node.get.collapsed then +// if cell1.node.isDefined then +// cell1.node.get.cells.foreach{ +// case (offset, cell) => +// if pointTo.contains(cell) then +// if pointTo.contains(cell2.node.get.cells(0)) then +// mergeCells(getPointee(cell), getPointee(cell2.node.get.cells(0))) +// else +// pointTo.update(cell2.node.get.cells(0), getPointee(cell)) +// pointTo.remove(cell) +// replaceInPointTo(cell, cell2.node.get.cells(0)) +// } +// cell2.node.get.cells(0) +// else +// if pointTo.contains(cell1) then +// if pointTo.contains(cell2.node.get.cells(0)) then +// mergeCells(getPointee(cell1), getPointee(cell2.node.get.cells(0))) +// else +// pointTo.update(cell2.node.get.cells(0), getPointee(cell1)) +// pointTo.remove(cell1) +// replaceInPointTo(cell1, cell2.node.get.cells(0)) +// cell2.node.get.cells(0) +// else +// cell1.node.get.cells.foreach{ +// case (offset, cell) => +// if pointTo.contains(cell) then +// if pointTo.contains(cell2.node.get.cells(offset)) then +// mergeCells(getPointee(cell), getPointee(cell2.node.get.cells(offset))) +// else +// pointTo.update(cell2.node.get.cells(offset), getPointee(cell)) +// pointTo.remove(cell) +// replaceInPointTo(cell, cell2.node.get.cells(offset)) +// +// +// } +// cell2 +// +// +// private def incompatibleTypes(cell1: DSC, cell2: DSC): Boolean = +// if 
cell2.node.get.collapsed then +// return false +// else if cell1.node.isEmpty then +// return true // TODO not sure about this +// else if cell1.node.get.cells.size != cell2.node.get.cells.size then +// return true +// else +// (cell1.node.get.cells zip cell2.node.get.cells).foreach { +// case ((o1, c1), (o2, c2)) => +// if o1 != o2 || !c1.accessedSizes.equals(c2.accessedSizes) then +// return true +// } +// false +// +// private def multiAccessesSizes(node: DSN): Boolean = +// node.cells.foreach( +// c => +// val cell = c._2 +// if cell.accessedSizes.size > 1 then +// return true +// ) +// false +// +// +// private val swappedOffsets = globalOffsets.map(_.swap) +// +// val globalMapping: mutable.Map[(BigInt, BigInt), DSN] = globals.foldLeft(mutable.Map[(BigInt, BigInt), DSN]()) { +// (m, global) => +// var address: BigInt = global.address +// if swappedOffsets.contains(address) then +// address = swappedOffsets(address) +// m + ((address, address + global.size) -> DSN(None, Some(DataRegion2(global.name, global.address, global.size)))) +// } +// +// externalFunctions.foreach( +// external => +// var address: BigInt = external.offset +// if swappedOffsets.contains(address) then +// address = swappedOffsets(address) +// globalMapping.update((address, address), DSN(None, Some(DataRegion2(external.name, address, 0)))) +// ) +// +// +// val varToSym: Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]] = symResults.foldLeft(Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]]()) { +// (outerMap, syms) => +// val position = syms._1 +// val innerMap = syms._2.foldLeft(Map[Variable, Set[SymbolicAccess]]()) { +// (m, access) => +// val b = position +// if (m.contains(access._1.accessor)) then +// m + (access._1.accessor -> (m(access._1.accessor) + access._1)) +// else +// m + (access._1.accessor -> Set(access._1)) +// } +// +// outerMap + (position -> innerMap) +// } +// +// private def isGlobal(address: BigInt): Option[DSN] = +// for (elem <- globalMapping) { +// val range = elem._1 +// if address >= range._1 && address <= range._2 then +// return Some(elem._2) +// } +// None +// +// +// +// private def buildNode(sym: SymbolicAccess, offset: BigInt, size: Int): Unit = +// val region = sym.symbolicBase +// val newOffset = sym.offset + offset +// val proc = region match +// case DataRegion2(regionIdentifier, start, size) => ??? 
+// case HeapRegion2(regionIdentifier, proc, size) => proc +// case StackRegion2(regionIdentifier, proc, size) => proc +// case UnknownRegion2(regionIdentifier, proc) => proc +// val graph = graphs(proc) +// val node = graph.addNode(region, newOffset, size) +// nodes.update(region, node) +// +// private def getCell(sym: SymbolicAccess, offset: BigInt): DSC = +// val region = sym.symbolicBase +// val newOffset = sym.offset + offset +// val node = nodes(region) +// if node.collapsed then +// node.cells(0) +// else +// node.cells(newOffset) +// +// private def visit(n: CFGPosition): Unit = +// n match +// case LocalAssign(variable, expr, maybeString) => +// expr match +// case MemoryLoad(mem, index, endian, size) => +// val byteSize = (size.toDouble/8).ceil.toInt +// if evaluateExpression(index, constProp(n)).isDefined && isGlobal(evaluateExpression(index, constProp(n)).get.value).isDefined then +// val address = evaluateExpression(index, constProp(n)).get.value +// val node: DSN = isGlobal(evaluateExpression(index, constProp(n)).get.value).get +// val baseAddress = node.region.get.asInstanceOf[DataRegion2].start +// val offset = address - baseAddress +// node.addCell(offset, size) +// loadStore.add(n) +// else +// index match +// case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => +// assert(varToSym(n).contains(arg1)) +// val offset = evaluateExpression(arg2, constProp(n)).get.value +// varToSym(n)(arg1).foreach(sym => buildNode(sym, offset, byteSize)) +// loadStore.add(n) +// case arg: Variable => +// assert(varToSym(n).contains(arg)) +// varToSym(n)(arg).foreach(sym => buildNode(sym, 0, byteSize)) +// loadStore.add(n) +// case _ => ??? +// case _ => +// case MemoryAssign(mem, MemoryStore(mem2, index, value, endian, size), label) => +// val byteSize = (size.toDouble/8).ceil.toInt +// if evaluateExpression(index, constProp(n)).isDefined && isGlobal(evaluateExpression(index, constProp(n)).get.value).isDefined then +// val address = evaluateExpression(index, constProp(n)).get.value +// val node: DSN = isGlobal(evaluateExpression(index, constProp(n)).get.value).get +// val baseAddress = node.region.get.asInstanceOf[DataRegion2].start +// val offset = address - baseAddress +// node.addCell(offset, size) +// loadStore.add(n) +// else +// index match +// case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => +// assert(varToSym(n).contains(arg1)) +// val offset = evaluateExpression(arg2, constProp(n)).get.value +// varToSym(n)(arg1).foreach(sym => buildNode(sym, offset, byteSize)) +// loadStore.add(n) +// value match +// case BinaryExpr(op, arg1: Variable, arg2) if varToSym(n).contains(arg1) && evaluateExpression(arg2, constProp(n)).isDefined => +// val offset = evaluateExpression(arg2, constProp(n)).get.value +// varToSym(n)(arg1).foreach(sym => buildNode(sym, offset, byteSize)) +// case variable: Variable if varToSym(n).contains(variable) => +// varToSym(n)(variable).foreach(sym => buildNode(sym, 0, byteSize)) +// case _ => +// case arg: Variable => +// assert(varToSym(n).contains(arg)) +// varToSym(n)(arg).foreach(sym => buildNode(sym, 0, byteSize)) +// loadStore.add(n) +// value match +// case BinaryExpr(op, arg1: Variable, arg2) if varToSym(n).contains(arg1) && evaluateExpression(arg2, constProp(n)).isDefined => +// val offset = evaluateExpression(arg2, constProp(n)).get.value +// varToSym(n)(arg1).foreach(sym => buildNode(sym, offset, byteSize)) +// case variable: Variable if 
varToSym(n).contains(variable) => +// varToSym(n)(variable).foreach(sym => buildNode(sym, 0, byteSize)) +// +// case _ => +// case _ => ??? +// case _ => +// +// private def coolVisit(n: CFGPosition): Unit = +// n match +// case LocalAssign(variable, expr, maybeString) => +// val pointers : mutable.Set[DSC] = mutable.Set() +// varToSym(n).getOrElse(variable, Set()).foreach(sym => pointers.add(getCell(sym, 0))) +// +// expr match +// case MemoryLoad(mem, index, endian, size) => +// val byteSize = (size.toDouble / 8).ceil.toInt +// val pointees: mutable.Set[DSC] = mutable.Set() +// if evaluateExpression(index, constProp(n)).isDefined && isGlobal(evaluateExpression(index, constProp(n)).get.value).isDefined then +// val address = evaluateExpression(index, constProp(n)).get.value +// val node: DSN = isGlobal(evaluateExpression(index, constProp(n)).get.value).get +// val baseAddress = node.region.get.asInstanceOf[DataRegion2].start +// val offset = address - baseAddress +// if node.collapsed then pointees.add(getPointee(node.cells(0))) else pointees.add(getPointee(node.cells(offset))) +// else +// index match +// case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => +// assert(varToSym(n).contains(arg1)) +// val offset = evaluateExpression(arg2, constProp(n)).get.value +// varToSym(n)(arg1).foreach(sym => pointees.add(getPointee(getCell(sym, offset)))) +// case arg: Variable => +// assert(varToSym(n).contains(arg)) +// varToSym(n)(arg).foreach(sym => pointees.add(getPointee(getCell(sym, 0)))) +// case _ => ??? +// pointees.foreach( +// pointee => +// pointers.foreach( +// pointer => mergeCells(pointer, pointee) +// ) +// ) +// case _ => +// case MemoryAssign(mem, MemoryStore(mem2, index, value, endian, size), label) => +// val pointees : mutable.Set[DSC] = mutable.Set() +// val pointers: mutable.Set[DSC] = mutable.Set() +// val byteSize = (size.toDouble / 8).ceil.toInt +// if evaluateExpression(index, constProp(n)).isDefined && isGlobal(evaluateExpression(index, constProp(n)).get.value).isDefined then +// val address = evaluateExpression(index, constProp(n)).get.value +// val node: DSN = isGlobal(evaluateExpression(index, constProp(n)).get.value).get +// val baseAddress = node.region.get.asInstanceOf[DataRegion2].start +// val offset = address - baseAddress +// if node.collapsed then pointees.add(getPointee(node.cells(0))) else pointees.add(getPointee(node.cells(offset))) +// else +// index match +// case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => +// assert(varToSym(n).contains(arg1)) +// val offset = evaluateExpression(arg2, constProp(n)).get.value +// varToSym(n)(arg1).foreach(sym => pointees.add(getPointee(getCell(sym, offset)))) +// value match +// case BinaryExpr(op, arg1: Variable, arg2) if varToSym(n).contains(arg1) && evaluateExpression(arg2, constProp(n)).isDefined => +// val offset = evaluateExpression(arg2, constProp(n)).get.value +// varToSym(n)(arg1).foreach(sym => pointers.add(getCell(sym, offset))) +// case variable: Variable if varToSym(n).contains(variable) => +// varToSym(n)(variable).foreach(sym => pointers.add(getCell(sym, 0))) +// case _ => +// case arg: Variable => +// assert(varToSym(n).contains(arg)) +// varToSym(n)(arg).foreach(sym => pointees.add(getPointee(getCell(sym, 0)))) +// value match +// case BinaryExpr(op, arg1: Variable, arg2) if varToSym(n).contains(arg1) && evaluateExpression(arg2, constProp(n)).isDefined => +// val offset = evaluateExpression(arg2, 
constProp(n)).get.value +// varToSym(n)(arg1).foreach(sym => pointers.add(getCell(sym, offset))) +// case variable: Variable if varToSym(n).contains(variable) => +// varToSym(n)(variable).foreach(sym => pointers.add(getCell(sym, 0))) +// +// case _ => +// case _ => ??? +// pointees.foreach( +// pointee => +// pointers.foreach( +// pointer => mergeCells(pointer, pointee) +// ) +// ) +// case _ => +// +// +// def analyze(): Any = ??? +//// program.procedures.foreach(proc => graphs.update(proc, DSG(proc))) +//// computeDomain(InterProcIRCursor, Set(program.mainProcedure)).foreach(visit) +//// nodes.values.foreach(earlyCollapse) +//// loadStore.foreach(coolVisit) +//// pointTo.foreach{ +//// case (cell1, cell2) => +//// println(cell1.toString + " -> " + cell2.toString) +//// } +//} diff --git a/src/main/scala/analysis/SymbolicAccessAnalysis.scala b/src/main/scala/analysis/SymbolicAccessAnalysis.scala new file mode 100644 index 000000000..247862594 --- /dev/null +++ b/src/main/scala/analysis/SymbolicAccessAnalysis.scala @@ -0,0 +1,168 @@ +package analysis + +import analysis.solvers.ForwardIDESolver +import ir.IRWalk.procedure +import ir.{BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Extract, GoTo, IndirectCall, Literal, LocalAssign, Memory, MemoryLoad, MemoryStore, Procedure, Program, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend} + +import java.math.BigInteger + +case class SymbolicAccess(accessor: Variable, symbolicBase: MemoryRegion2, offset: BigInt) { + override def toString: String = s"SymbolicAccess($accessor, $symbolicBase, $offset)" +} + +trait MemoryRegion2 { + val regionIdentifier: String + + override def toString: String = s"MemoryRegion($regionIdentifier)" +} + +case class StackRegion2(override val regionIdentifier: String, proc: Procedure, size: BigInt) extends MemoryRegion2 { + override def toString: String = s"Stack($regionIdentifier, $size)" +} + +case class HeapRegion2(override val regionIdentifier: String, proc: Procedure, size: BigInt) extends MemoryRegion2 { + override def toString: String = s"Heap($regionIdentifier, $size)" +} + +case class DataRegion2(override val regionIdentifier: String, start: BigInt, size: BigInt) extends MemoryRegion2 { + override def toString: String = s"Data($regionIdentifier, $start)" +} + +case class UnknownRegion2(override val regionIdentifier: String, proc: Procedure) extends MemoryRegion2 { + override def toString: String = s"Unknown($regionIdentifier)" +} + +trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]]) extends ForwardIDEAnalysis[SymbolicAccess, TwoElement, TwoElementLattice] { + + private val stackPointer = Register("R31", BitVecType(64)) + private val linkRegister = Register("R30", BitVecType(64)) + private val framePointer = Register("R29", BitVecType(64)) + private val mallocVariable = Register("R0", BitVecType(64)) + + var mallocCount: Int = 0 + private def nextMallocCount = { + mallocCount += 1 + s"malloc_$mallocCount" + } + + var unknownCount: Int = 0 + private def nextunknownCount = { + unknownCount += 1 + s"unknown_$unknownCount" + } + + val valuelattice: TwoElementLattice = TwoElementLattice() + val edgelattice: EdgeFunctionLattice[TwoElement, TwoElementLattice] = EdgeFunctionLattice(valuelattice) + import edgelattice.{IdEdge, ConstEdge} + + def decToBinary(n: BigInt): Array[Int] = { + val binaryNum: Array[Int] = new Array[Int](64) + var i = 0 + var num = n + while (num > 0) { + binaryNum(i) = (num % BigInt(2)).intValue + num = num / 2 + 
i += 1 + } + binaryNum + } + + def twosComplementToDec(binary: Array[Int]): BigInt = { + var result: BigInt = BigInt(0) + var counter: Int = 0 + binary.foreach( + n => + if counter == binary.length - 1 && n == 1 then + result = result - BigInt(2).pow(counter) + else if n == 1 then + result = result + BigInt(2).pow(counter) + counter += 1 + ) + result + } + + def edgesCallToEntry(call: DirectCall, entry: Procedure)(d: DL): Map[DL, EdgeFunction[TwoElement]] = + d match + case Left(value) => + value.symbolicBase match + case StackRegion2(regionIdentifier, parent, size) => Map() + case _ => Map(d -> IdEdge()) + case Right(_) => Map(d -> IdEdge()) + + def edgesExitToAfterCall(exit: IndirectCall, aftercall: GoTo)(d: DL): Map[DL, EdgeFunction[TwoElement]] = + d match + case Left(value) => + value.symbolicBase match + case StackRegion2(regionIdentifier, parent, size) => Map() + case _ => + if value.accessor.name == "R29" then + Map() + else Map(d -> IdEdge()) + case Right(_) => Map(d -> IdEdge()) + + def edgesCallToAfterCall(call: DirectCall, aftercall: GoTo)(d: DL): Map[DL, EdgeFunction[TwoElement]] = + d match + case Left(value) => + value.symbolicBase match + case StackRegion2(regionIdentifier, parent, size) => Map(d -> IdEdge()) + case _ => Map() // maps all variables before the call to bottom + case Right(_) => Map(d -> IdEdge()) + + def edgesOther(n: CFGPosition)(d: DL): Map[DL, EdgeFunction[TwoElement]] = + val bitvecnegative: BigInt = new BigInt(new BigInteger("9223372036854775808")) // negative 64 bit integer + + n match + case LocalAssign(variable, expr, maybeString) => + expr match + case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) && arg1.equals(stackPointer) + && evaluateExpression(arg2, constProp(n)).isDefined && evaluateExpression(arg2, constProp(n)).get.value >= bitvecnegative => + d match + case Left(value) if value.accessor == variable => Map() + case Left(value) => Map(d -> IdEdge()) + case Right(_) => + val size = twosComplementToDec(decToBinary(evaluateExpression(arg2, constProp(n)).get.value)) + Map(d -> IdEdge(), Left(SymbolicAccess(variable, StackRegion2(s"Stack_${procedure(n).name}", procedure(n), -size), 0)) -> ConstEdge(TwoElementTop)) + case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => + d match + case Left(value) if value.accessor == arg1 => + val offsetUpdate = evaluateExpression(arg2, constProp(n)).get.value + val result: Map[DL, EdgeFunction[TwoElement]] = Map(Left(SymbolicAccess(variable, value.symbolicBase, value.offset + offsetUpdate)) -> ConstEdge(TwoElementTop)) + if value.accessor != variable then + result + (d -> IdEdge()) + else + result + case Left(value) if value.accessor == variable => Map() + case _ => Map(d -> IdEdge()) + case arg:Variable => + d match + case Left(value) if value.accessor == arg => + val result: Map[DL, EdgeFunction[TwoElement]] = Map(Left(SymbolicAccess(variable, value.symbolicBase, value.offset)) -> ConstEdge(TwoElementTop)) + if value.accessor != variable then + result + (d -> IdEdge()) + else + result + case Left(value) if value.accessor == variable => Map() + case _ => Map(d -> IdEdge()) + case MemoryLoad(mem, index, endian, size) => + d match + case Left(value) if value.accessor == variable => Map() + case Left(value) => Map(d -> IdEdge()) + case Right(_) => Map(d -> IdEdge(), Left(SymbolicAccess(variable, UnknownRegion2(nextunknownCount, procedure(n)), 0)) -> ConstEdge(TwoElementTop)) + case _ => + d match + case Left(value) if value.accessor == variable => Map() + case 
_ => Map(d -> IdEdge()) + case DirectCall(proc, ret, label) if proc.name == "malloc" => + d match + case Left(value) if value.accessor == mallocVariable => Map() + case Left(value) => Map(d -> IdEdge()) + case Right(value) => + val size: BigInt = evaluateExpression(mallocVariable, constProp(n)) match + case Some(value) => value.value + case None => -1 + Map(d -> IdEdge(), Left(SymbolicAccess(mallocVariable, HeapRegion2(nextMallocCount, procedure(n), size), 0)) -> ConstEdge(TwoElementTop)) + case _ => Map(d -> IdEdge()) +} + +class SymbolicAccessAnalysis(program: Program, constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]]) + extends ForwardIDESolver[SymbolicAccess, TwoElement, TwoElementLattice](program), SymbolicAccessFunctions(constProp) \ No newline at end of file diff --git a/src/main/scala/analysis/WriteToAnalysis.scala b/src/main/scala/analysis/WriteToAnalysis.scala new file mode 100644 index 000000000..b1a58ad24 --- /dev/null +++ b/src/main/scala/analysis/WriteToAnalysis.scala @@ -0,0 +1,50 @@ +package analysis + +import ir.{Assert, Assume, BitVecType, Call, DirectCall, GoTo, LocalAssign, MemoryAssign, NOP, Procedure, Program, Register} + +import scala.collection.mutable + +class WriteToAnalysis(program: Program) extends Analysis[Map[Procedure, Set[Register]]] { + + val writesTo: mutable.Map[Procedure, Set[Register]] = mutable.Map() + val mallocRegister = Register("R0", BitVecType(64)) + val paramRegisters: Set[Register] = Set( + mallocRegister, + Register("R1", BitVecType(64)), + Register("R2", BitVecType(64)), + Register("R3", BitVecType(64)), + Register("R4", BitVecType(64)), + Register("R5", BitVecType(64)), + Register("R6", BitVecType(64)), + Register("R7", BitVecType(64)), + ) + + def getWritesTos(proc: Procedure): Set[Register] = { + if writesTo.contains(proc) then + writesTo(proc) + else + val writtenTo : mutable.Set[Register] = mutable.Set() + proc.blocks.foreach( + block => + block.statements.foreach { + case LocalAssign(variable: Register, value, label) if paramRegisters.contains(variable) => + writtenTo.add(variable) + case _ => + } + + block.jump match + case DirectCall(proc, returnTarget, label) if proc.name == "malloc" => + writtenTo.add(mallocRegister) + case DirectCall(proc, returnTarget, label) if program.procedures.contains(proc) => + writtenTo.++=(getWritesTos(proc)) + case _ => + ) + + writesTo.update(proc, writtenTo.toSet) + writesTo(proc) + } + + def analyze(): Map[Procedure, Set[Register]] = + program.procedures.foreach(proc => getWritesTos) + writesTo.toMap +} diff --git a/src/main/scala/analysis/solvers/IDESolver.scala b/src/main/scala/analysis/solvers/IDESolver.scala index 5773ba047..1799e36e4 100644 --- a/src/main/scala/analysis/solvers/IDESolver.scala +++ b/src/main/scala/analysis/solvers/IDESolver.scala @@ -245,7 +245,10 @@ abstract class BackwardIDESolver[D, T, L <: Lattice[T]](program: Program) protected def callToReturn(call: GoTo): DirectCall = call.parent.jump.asInstanceOf[DirectCall] - protected def returnToCall(ret: DirectCall): GoTo = ret.parent.fallthrough.get + protected def returnToCall(ret: DirectCall): GoTo = + if ret.parent.fallthrough.isEmpty then + print("") + ret.parent.fallthrough.get protected def getCallee(call: GoTo): IndirectCall = callToReturn(call).target.end.asInstanceOf[IndirectCall] diff --git a/src/main/scala/ir/IRCursor.scala b/src/main/scala/ir/IRCursor.scala index 2b5d3123e..083216250 100644 --- a/src/main/scala/ir/IRCursor.scala +++ b/src/main/scala/ir/IRCursor.scala @@ -253,7 +253,7 @@ def toDot[T <: 
CFGPosition]( case s => s.toString } if (labels.contains(node)) { - text += "\n" ++ labels(node) + text = labels(node) ++ "\n" ++ text } text } diff --git a/src/main/scala/translating/ReadELFLoader.scala b/src/main/scala/translating/ReadELFLoader.scala index d874f05fe..60fac28bf 100644 --- a/src/main/scala/translating/ReadELFLoader.scala +++ b/src/main/scala/translating/ReadELFLoader.scala @@ -76,7 +76,7 @@ object ReadELFLoader { } def visitSymbolTableRow(ctx: SymbolTableRowContext): Option[SpecGlobal] = { - if (ctx.entrytype.getText == "OBJECT" && ctx.bind.getText == "GLOBAL" && ctx.vis.getText == "DEFAULT") { + if ((ctx.entrytype.getText == "OBJECT" || ctx.entrytype.getText == "FUNC") && ctx.bind.getText == "GLOBAL" && ctx.vis.getText == "DEFAULT") { val name = ctx.name.getText if (name.forall(allowedChars.contains)) { Some(SpecGlobal(name, ctx.size.getText.toInt * 8, None, hexToBigInt(ctx.value.getText))) diff --git a/src/test/scala/RegionBuilderTests.scala b/src/test/scala/RegionBuilderTests.scala new file mode 100644 index 000000000..70b37086b --- /dev/null +++ b/src/test/scala/RegionBuilderTests.scala @@ -0,0 +1,28 @@ +import org.scalatest.funsuite.AnyFunSuite +import test_util.TestUtil +import util.{BASILConfig, BoogieGeneratorConfig, ILLoadingConfig, RunUtils, StaticAnalysisConfig} + +class RegionBuilderTests extends AnyFunSuite, TestUtil { + + for (p <- correctPrograms) { + val programPath = correctPath + "/" + p + val variations = getSubdirectories(programPath) + variations.foreach(t => + test("Correct" + "/" + p + "/" + t) { + RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = correctPath + s"/$p/$t/$p.adt", + relfFile = correctPath + s"/$p/$t/$p.relf", + specFile = None, + dumpIL = None + ), + staticAnalysis = Some(StaticAnalysisConfig(None)), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + } + ) + } +} From 02eff56e2c0adf4284ca184da5fa69fcc700757d Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 29 Apr 2024 12:15:29 +1000 Subject: [PATCH 006/104] added stack object mapping --- src/main/scala/analysis/DSAUtility.scala | 172 ++++++++++++++++----- src/main/scala/analysis/Local.scala | 60 ++++++- src/main/scala/analysis/ReachingDefs.scala | 8 +- 3 files changed, 187 insertions(+), 53 deletions(-) diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index 7bce8e920..3566b8f5b 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -1,9 +1,13 @@ package analysis -import ir.{BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, IntraProcIRCursor, LocalAssign, MemoryAssign, MemoryStore, Procedure, Register, Variable, computeDomain, toShortString} +import ir.{BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, IntraProcIRCursor, Literal, LocalAssign, Memory, MemoryAssign, MemoryLoad, MemoryStore, Procedure, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, computeDomain, toShortString} import specification.{ExternalFunction, SpecGlobal} -import scala.collection.mutable; +import scala.util.control.Breaks.{break, breakable} +import java.math.BigInteger +import scala.collection.mutable + +import scala.collection.mutable object NodeCounter { var counter: Int = 0 @@ -17,10 +21,9 @@ object NodeCounter { } - - -class DSG(proc: Procedure, +class DSG(val proc: Procedure, constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + varToSym: Map[CFGPosition, 
Map[Variable, Set[SymbolicAccess]]], globals: Set[SpecGlobal], globalOffsets: Map[BigInt, BigInt], externalFunctions: Set[ExternalFunction], reachingDefs: Map[CFGPosition, Map[Variable, Set[CFGPosition]]], @@ -32,6 +35,91 @@ class DSG(proc: Procedure, val mallocRegister = Register("R0", BitVecType(64)) val stackPointer = Register("R31", BitVecType(64)) + // make stack nodes with + val stackMapping: mutable.Map[BigInt, DSN] = + computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).foldLeft(Map[BigInt, DSN]()) { + (results, pos) => stackBuilder(pos, results) + }.to(collection.mutable.Map) + + def stackBuilder(pos: CFGPosition, m: Map[BigInt, DSN]): Map[BigInt, DSN] = { + pos match + case LocalAssign(variable: Variable, expr: Expr, _) => + expr match + case MemoryLoad(mem, index, endian, size) => + val byteSize = (size.toDouble / 8).ceil.toInt + index match + case BinaryExpr(op, arg1: Variable, arg2) if varToSym.contains(pos) && varToSym(pos).contains(arg1) && + evaluateExpression(arg2, constProp(pos)).isDefined => + var offset = evaluateExpression(arg2, constProp(pos)).get.value + varToSym(pos)(arg1).foldLeft(m) { + (m, sym) => + sym match + case SymbolicAccess(accessor, StackRegion2(regionIdentifier, proc, size), symOffset) => + offset = offset + symOffset + if m.contains(offset) then + m(offset).addCell(0, byteSize) + m + else + val node = DSN(Some(this), Some(StackRegion2(pos.toShortString, proc, byteSize))) + node.addCell(0, byteSize) + m + (offset -> node) + case _ => m + } + case arg: Variable if varToSym.contains(pos) && varToSym(pos).contains(arg) => + varToSym(pos)(arg).foldLeft(m) { + (m, sym) => + sym match + case SymbolicAccess(accessor, StackRegion2(regionIdentifier, proc, size), offset) => + if m.contains(offset) then + m(offset).addCell(0, byteSize) + m + else + val node = DSN(Some(this), Some(StackRegion2(pos.toShortString, proc, byteSize))) + node.addCell(0, byteSize) + m + (offset -> node) + case _ => m + } + case _ => m + case _ => m + case MemoryAssign(mem, MemoryStore(mem2, index, value, endian, size), label) => + val byteSize = (size.toDouble / 8).ceil.toInt + index match + case BinaryExpr(op, arg1: Variable, arg2) if varToSym.contains(pos) && varToSym(pos).contains(arg1) && + evaluateExpression(arg2, constProp(pos)).isDefined => + var offset = evaluateExpression(arg2, constProp(pos)).get.value + varToSym(pos)(arg1).foldLeft(m) { + (m, sym) => + sym match + case SymbolicAccess(accessor, StackRegion2(regionIdentifier, proc, size), symOffset) => + offset = offset + symOffset + if m.contains(offset) then + m(offset).addCell(0, byteSize) + m + else + val node = DSN(Some(this), Some(StackRegion2(pos.toShortString, proc, byteSize))) + node.addCell(0, byteSize) + m + (offset -> node) + case _ => m + } + case arg: Variable if varToSym.contains(pos) && varToSym(pos).contains(arg) => + varToSym(pos)(arg).foldLeft(m) { + (m, sym) => + sym match + case SymbolicAccess(accessor, StackRegion2(regionIdentifier, proc, size), offset) => + if m.contains(offset) then + m(offset).addCell(0, byteSize) + m + else + val node = DSN(Some(this), Some(StackRegion2(pos.toShortString, proc, byteSize))) + node.addCell(0, byteSize) + m + (offset -> node) + case _ => m + } + case _ => m + case _ => m + + } + // make all globals private val swappedOffsets = globalOffsets.map(_.swap) @@ -77,6 +165,28 @@ class DSG(proc: Procedure, pointTo.update(pointer, newCell) } + private def replaceInGlobals(oldCell: DSC, newCell: DSC) = + if oldCell.node.isDefined then + globalMapping.foreach { + 
case (key, node) => + if node.equals(oldCell.node.get) then + globalMapping.update(key, newCell.node.get) + } + + private def replaceInStack(oldCell: DSC, newCell: DSC) = + if oldCell.node.isDefined then + stackMapping.foreach{ + case (offset, node) => + if node.equals(oldCell.node.get) then + stackMapping.update(offset, newCell.node.get) + } + + private def replace(oldCell: DSC, newCell: DSC) = + replaceInEV(oldCell, newCell) + replaceInPointTo(oldCell, newCell) + replaceInGlobals(oldCell, newCell) + replaceInStack(oldCell, newCell) + def getPointee(cell: DSC): DSC = if !pointTo.contains(cell) then val node = DSN(None, None) @@ -94,11 +204,6 @@ class DSG(proc: Procedure, val collapedCell = DSC(Option(node), 0, true) val e = DSC(None, 0) - if node.id == 20 then - print("") - - - val cell = node.cells.foldLeft(e) { (c, field) => @@ -113,8 +218,7 @@ class DSG(proc: Procedure, node.cells.values.foreach( cell => - replaceInEV(cell, collapedCell) - replaceInPointTo(cell, collapedCell) + replace(cell, collapedCell) pointTo.foreach { case (pointer, pointee) => if pointer.equals(cell) then @@ -133,6 +237,8 @@ class DSG(proc: Procedure, def mergeCells(cell1: DSC, cell2: DSC): DSC = + if cell2.node.get.id == 31 then + print("") if (cell1 == cell2) { return cell1 } @@ -153,8 +259,9 @@ class DSG(proc: Procedure, else pointTo.update(cell2.node.get.cells(0), getPointee(cell)) pointTo.remove(cell) - replaceInPointTo(cell, cell2.node.get.cells(0)) - replaceInEV(cell, cell2.node.get.cells(0)) +// replaceInPointTo(cell, cell2.node.get.cells(0)) +// replaceInEV(cell, cell2.node.get.cells(0)) + replace(cell, cell2.node.get.cells(0)) } cell2.node.get.cells(0) else @@ -164,8 +271,9 @@ class DSG(proc: Procedure, else pointTo.update(cell2.node.get.cells(0), getPointee(cell1)) pointTo.remove(cell1) - replaceInPointTo(cell1, cell2.node.get.cells(0)) - replaceInEV(cell1, cell2.node.get.cells(0)) +// replaceInPointTo(cell1, cell2.node.get.cells(0)) +// replaceInEV(cell1, cell2.node.get.cells(0)) + replace(cell1, cell2.node.get.cells(0)) cell2.node.get.cells(0) else cell1.node.get.cells.foreach { @@ -176,8 +284,9 @@ class DSG(proc: Procedure, else pointTo.update(cell2.node.get.cells(offset), getPointee(cell)) pointTo.remove(cell) - replaceInPointTo(cell, cell2.node.get.cells(offset)) - replaceInEV(cell, cell2.node.get.cells(offset)) +// replaceInPointTo(cell, cell2.node.get.cells(offset)) +// replaceInEV(cell, cell2.node.get.cells(offset)) + replace(cell, cell2.node.get.cells(offset)) } cell2 @@ -244,39 +353,18 @@ class DSG(proc: Procedure, } -// def coolMergeCells(cell1: DSC, cell2: DSC): DSC = -// val changedFormals = formals.foldLeft(Set[Variable]()) { -// (s, f) => -// if f._2 == cell1 then -// s + f._1 -// else -// s -// } -// -// val changedVars = varToCell.foldLeft(Map[CFGPosition, Set[Variable]]()) { -// -// } -// -// cell1 - - - def addNode(memoryRegion2: MemoryRegion2, offset: BigInt, size: Int): DSN = ??? 
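+  // Summary of the merging machinery above (simplified, for orientation):
+  //  * getPointee(c) lazily materialises a fresh placeholder node the first time a cell's
+  //    pointee is requested, so every cell always has something to merge with.
+  //  * collapseNode(n) folds the pointees of all of n's cells into a single cell, marks n as
+  //    collapsed, and redirects every in-edge (varToCell, pointTo, the global mapping and the
+  //    stack mapping) onto the one remaining cell via replace(...).
+  //  * mergeCells(c1, c2) collapses the target node when the two cells' layouts are
+  //    incompatible, then unifies the cells field by field, merging their pointees as it goes.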
-// if nodes.contains(memoryRegion2) then -// nodes(memoryRegion2).addCell(offset, size) -// else -// val node = DSN(Some(this), Some(memoryRegion2)) -// nodes.update(memoryRegion2, node) -// node.addCell(offset, size) -// nodes(memoryRegion2) } class DSN(val graph: Option[DSG], var region: Option[MemoryRegion2]) { val id: Int = NodeCounter.getCounter + if id == 31 then + print("") + var collapsed = false val allocationRegions: mutable.Set[MemoryRegion2] = region match diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala index 7a887b58e..4789395ef 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/Local.scala @@ -5,6 +5,7 @@ import specification.{ExternalFunction, SpecGlobal} import scala.util.control.Breaks.{break, breakable} import java.math.BigInteger +import scala.collection.mutable class Local( proc: Procedure, @@ -21,20 +22,46 @@ class Local( val stackPointer = Register("R31", BitVecType(64)) + val varToSym: Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]] = symResults.foldLeft(Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]]()) { (outerMap, syms) => val position = syms._1 val innerMap = syms._2.foldLeft(Map[Variable, Set[SymbolicAccess]]()) { (m, access) => - val b = position if (m.contains(access._1.accessor)) then + // every variable pointing to a stack region ONLY has one symbolic access associated with it. + m(access._1.accessor).foreach( + sym => assert(!sym.symbolicBase.isInstanceOf[StackRegion2]) + ) + assert(!access._1.symbolicBase.isInstanceOf[StackRegion2]) m + (access._1.accessor -> (m(access._1.accessor) + access._1)) else m + (access._1.accessor -> Set(access._1)) } - outerMap + (position -> innerMap) } + + + def isStack(expr: Expr, pos: CFGPosition): Option[DSC] = + expr match + case BinaryExpr(op, arg1: Variable, arg2) if varToSym.contains(pos) && varToSym(pos).contains(arg1) && + varToSym(pos)(arg1).size == 1 && varToSym(pos)(arg1).head.symbolicBase.isInstanceOf[StackRegion2] && + evaluateExpression(arg2, constProp(pos)).isDefined => + val offset = evaluateExpression(arg2, constProp(pos)).get.value + varToSym(pos)(arg1).head.offset + if graph.stackMapping.contains(offset) then + Some(graph.stackMapping(offset).cells(0)) + else + None + case arg: Variable if varToSym.contains(pos) && varToSym(pos).contains(arg) && + varToSym(pos)(arg).size == 1 && varToSym(pos)(arg).head.symbolicBase.isInstanceOf[StackRegion2] => + val offset = varToSym(pos)(arg).head.offset + if graph.stackMapping.contains(offset) then + Some(graph.stackMapping(offset).cells(0)) + else + None + case _ => None + + def decToBinary(n: BigInt): Array[Int] = { val binaryNum: Array[Int] = new Array[Int](64) @@ -69,7 +96,7 @@ class Local( s"malloc_$mallocCount" } - val graph = DSG(proc, constProp, globals, globalOffsets, externalFunctions, reachingDefs, writesTo) + val graph = DSG(proc, constProp, varToSym, globals, globalOffsets, externalFunctions, reachingDefs, writesTo) def isGlobal(expr: Expr, pos: CFGPosition, size: Int = 0): Option[DSC] = @@ -98,8 +125,8 @@ class Local( def getNodes(pos: CFGPosition, arg: Variable): Set[DSN] = if reachingDefs(pos).contains(arg) then reachingDefs(pos)(arg).foldLeft(Set[DSN]()){ - (s, defintion) => - s + graph.varToCell(defintion)(arg).node.get + (s, definition) => + s + graph.varToCell(definition)(arg).node.get } else Set(graph.formals(arg).node.get) @@ -117,10 +144,18 @@ class Local( case LocalAssign(variable, expr, maybeString) => val lhsCell = graph.varToCell(n)(variable) + if 
maybeString.get.startsWith("%0000031f") then + print("") + if maybeString.get.startsWith("%00000325") then + print("") if isGlobal(expr, n).isDefined then val global = isGlobal(expr, n).get val result = graph.mergeCells(lhsCell, global) graph.varToCell(n).update(variable, result) + else if isStack(expr, n).isDefined then // just in case stack can't be recognised in after this assignment + val stack = isStack(expr, n).get + val result = graph.mergeCells(lhsCell, stack) + graph.varToCell(n).update(variable, result) else expr match case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) && arg1.equals(stackPointer) @@ -130,7 +165,7 @@ class Local( graph.nodes.add(node) val cell = graph.mergeCells(lhsCell, node.cells(0)) graph.varToCell(n).update(variable, cell) - case BinaryExpr(op, arg1: Variable, arg2) if varToSym(n).contains(arg1) && evaluateExpression(arg2, constProp(n)).isDefined => + case BinaryExpr(op, arg1: Variable, arg2) if varToSym.contains(n) && varToSym(n).contains(arg1) && evaluateExpression(arg2, constProp(n)).isDefined => val offset = evaluateExpression(arg2, constProp(n)).get.value val nodes: Set[DSN] = getNodes(n, arg1) nodes.foreach(_.addCell(offset, 0)) @@ -159,6 +194,10 @@ class Local( val global = isGlobal(index, n, byteSize).get val result = graph.mergeCells(lhsCell, graph.getPointee(global)) graph.varToCell(n).update(variable, result) + else if isStack(index, n).isDefined then + val stack = isStack(index, n).get + val result = graph.mergeCells(lhsCell, graph.getPointee(stack)) + graph.varToCell(n).update(variable, result) else index match case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => @@ -189,7 +228,7 @@ class Local( breakable { var containsPointer = false for (v <- expr.variables) { - if varToSym(n).contains(v) then + if varToSym.contains(n) && varToSym(n).contains(v) then containsPointer = true break } @@ -208,10 +247,14 @@ class Local( graph.varToCell(n).update(variable, node.cells(0)) } case MemoryAssign(memory, MemoryStore(mem, index, value: Variable, endian, size), label) => + if n.isInstanceOf[MemoryAssign] && n.asInstanceOf[MemoryAssign].label.get.startsWith("%00000318") then + print("") val byteSize = (size.toDouble/8).ceil.toInt val addressCell: DSC = if isGlobal(index, n, byteSize).isDefined then isGlobal(index, n, byteSize).get + else if isStack(index, n).isDefined then + isStack(index, n).get else index match case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => @@ -256,8 +299,9 @@ class Local( } def analyze(): Any = val domain = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).reverse + + -// println(domain) domain.foreach(visit) diff --git a/src/main/scala/analysis/ReachingDefs.scala b/src/main/scala/analysis/ReachingDefs.scala index 503b1f78d..e1ef02488 100644 --- a/src/main/scala/analysis/ReachingDefs.scala +++ b/src/main/scala/analysis/ReachingDefs.scala @@ -1,6 +1,6 @@ package analysis -import analysis.solvers.SimpleWorklistFixpointSolver +import analysis.solvers.SimplePushDownWorklistFixpointSolver import ir.{Assert, Assume, BitVecType, CFGPosition, Call, DirectCall, Expr, GoTo, IndirectCall, InterProcIRCursor, IntraProcIRCursor, LocalAssign, MemoryAssign, NOP, Procedure, Program, Register, Variable, computeDomain} abstract class ReachingDefs(program: Program, writesTo: Map[Procedure, Set[Register]]) extends Analysis[Map[CFGPosition, Map[Variable, Set[CFGPosition]]]] { @@ -10,6 +10,8 @@ abstract class ReachingDefs(program: 
Program, writesTo: Map[Procedure, Set[Regis val lattice: MapLattice[CFGPosition, Map[Variable, Set[CFGPosition]], MapLattice[Variable, Set[CFGPosition], PowersetLattice[CFGPosition]]] = new MapLattice(new MapLattice(new PowersetLattice[CFGPosition]())) def transfer(n: CFGPosition, s: Map[Variable, Set[CFGPosition]]): Map[Variable, Set[CFGPosition]] = + if n.isInstanceOf[LocalAssign] && n.asInstanceOf[LocalAssign].label.get.startsWith("%000004f4") then + print("") n match case loc:LocalAssign => s + (loc.lhs -> Set(n)) @@ -25,6 +27,6 @@ abstract class ReachingDefs(program: Program, writesTo: Map[Procedure, Set[Regis } -class ReachingDefsAnalysis(program: Program, writesTo: Map[Procedure, Set[Register]]) extends ReachingDefs(program, writesTo), IRInterproceduralForwardDependencies, - SimpleWorklistFixpointSolver[CFGPosition, Map[Variable, Set[CFGPosition]], MapLattice[Variable, Set[CFGPosition], PowersetLattice[CFGPosition]]] +class ReachingDefsAnalysis(program: Program, writesTo: Map[Procedure, Set[Register]]) extends ReachingDefs(program, writesTo), IRIntraproceduralForwardDependencies, + SimplePushDownWorklistFixpointSolver[CFGPosition, Map[Variable, Set[CFGPosition]], MapLattice[Variable, Set[CFGPosition], PowersetLattice[CFGPosition]]] From 24e22adc113e8351a3585f8256c845b7f26e2c40 Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Wed, 1 May 2024 10:47:10 +1000 Subject: [PATCH 007/104] fixed pointer arithmetic with offset calculations --- src/main/scala/analysis/DSAUtility.scala | 435 ++++++++++-------- src/main/scala/analysis/Local.scala | 237 +++++----- src/main/scala/analysis/ReachingDefs.scala | 2 - .../analysis/SymbolicAccessAnalysis.scala | 11 +- src/main/scala/analysis/WriteToAnalysis.scala | 2 +- src/main/scala/util/RunUtils.scala | 34 +- src/test/scala/LocalTest.scala | 56 +++ 7 files changed, 443 insertions(+), 334 deletions(-) create mode 100644 src/test/scala/LocalTest.scala diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index 3566b8f5b..880b36de2 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -14,8 +14,6 @@ object NodeCounter { def getCounter: Int = counter = counter + 1 - if counter == 64 then - print("") counter @@ -41,81 +39,50 @@ class DSG(val proc: Procedure, (results, pos) => stackBuilder(pos, results) }.to(collection.mutable.Map) - def stackBuilder(pos: CFGPosition, m: Map[BigInt, DSN]): Map[BigInt, DSN] = { + private def visitStackAccess(pos: CFGPosition, index: Expr, size: Int, m: Map[BigInt, DSN]) : Map[BigInt, DSN] = + val byteSize = (size.toDouble / 8).ceil.toInt + index match + case BinaryExpr(op, arg1: Variable, arg2) if varToSym.contains(pos) && varToSym(pos).contains(arg1) && + evaluateExpression(arg2, constProp(pos)).isDefined => + var offset = evaluateExpression(arg2, constProp(pos)).get.value + varToSym(pos)(arg1).foldLeft(m) { + (m, sym) => + sym match + case SymbolicAccess(accessor, StackRegion2(regionIdentifier, proc, size), symOffset) => + offset = offset + symOffset + if m.contains(offset) then + assert(!m(offset).cells(0).growSize(byteSize)) + m + else + val node = DSN(Some(this), Some(StackRegion2(pos.toShortString, proc, byteSize))) + node.addCell(0, byteSize) + m + (offset -> node) + case _ => m + } + case arg: Variable if varToSym.contains(pos) && varToSym(pos).contains(arg) => + varToSym(pos)(arg).foldLeft(m) { + (m, sym) => + sym match + case SymbolicAccess(accessor, StackRegion2(regionIdentifier, proc, size), offset) => + if 
m.contains(offset) then + assert(!m(offset).cells(0).growSize(byteSize)) + m + else + val node = DSN(Some(this), Some(StackRegion2(pos.toShortString, proc, byteSize))) + node.addCell(0, byteSize) + m + (offset -> node) + case _ => m + } + case _ => m + private def stackBuilder(pos: CFGPosition, m: Map[BigInt, DSN]): Map[BigInt, DSN] = { pos match case LocalAssign(variable: Variable, expr: Expr, _) => expr match case MemoryLoad(mem, index, endian, size) => - val byteSize = (size.toDouble / 8).ceil.toInt - index match - case BinaryExpr(op, arg1: Variable, arg2) if varToSym.contains(pos) && varToSym(pos).contains(arg1) && - evaluateExpression(arg2, constProp(pos)).isDefined => - var offset = evaluateExpression(arg2, constProp(pos)).get.value - varToSym(pos)(arg1).foldLeft(m) { - (m, sym) => - sym match - case SymbolicAccess(accessor, StackRegion2(regionIdentifier, proc, size), symOffset) => - offset = offset + symOffset - if m.contains(offset) then - m(offset).addCell(0, byteSize) - m - else - val node = DSN(Some(this), Some(StackRegion2(pos.toShortString, proc, byteSize))) - node.addCell(0, byteSize) - m + (offset -> node) - case _ => m - } - case arg: Variable if varToSym.contains(pos) && varToSym(pos).contains(arg) => - varToSym(pos)(arg).foldLeft(m) { - (m, sym) => - sym match - case SymbolicAccess(accessor, StackRegion2(regionIdentifier, proc, size), offset) => - if m.contains(offset) then - m(offset).addCell(0, byteSize) - m - else - val node = DSN(Some(this), Some(StackRegion2(pos.toShortString, proc, byteSize))) - node.addCell(0, byteSize) - m + (offset -> node) - case _ => m - } - case _ => m + visitStackAccess(pos, index, size, m) case _ => m case MemoryAssign(mem, MemoryStore(mem2, index, value, endian, size), label) => - val byteSize = (size.toDouble / 8).ceil.toInt - index match - case BinaryExpr(op, arg1: Variable, arg2) if varToSym.contains(pos) && varToSym(pos).contains(arg1) && - evaluateExpression(arg2, constProp(pos)).isDefined => - var offset = evaluateExpression(arg2, constProp(pos)).get.value - varToSym(pos)(arg1).foldLeft(m) { - (m, sym) => - sym match - case SymbolicAccess(accessor, StackRegion2(regionIdentifier, proc, size), symOffset) => - offset = offset + symOffset - if m.contains(offset) then - m(offset).addCell(0, byteSize) - m - else - val node = DSN(Some(this), Some(StackRegion2(pos.toShortString, proc, byteSize))) - node.addCell(0, byteSize) - m + (offset -> node) - case _ => m - } - case arg: Variable if varToSym.contains(pos) && varToSym(pos).contains(arg) => - varToSym(pos)(arg).foldLeft(m) { - (m, sym) => - sym match - case SymbolicAccess(accessor, StackRegion2(regionIdentifier, proc, size), offset) => - if m.contains(offset) then - m(offset).addCell(0, byteSize) - m - else - val node = DSN(Some(this), Some(StackRegion2(pos.toShortString, proc, byteSize))) - node.addCell(0, byteSize) - m + (offset -> node) - case _ => m - } - case _ => m + visitStackAccess(pos, index, size, m) case _ => m } @@ -123,24 +90,24 @@ class DSG(val proc: Procedure, // make all globals private val swappedOffsets = globalOffsets.map(_.swap) - val globalMapping: mutable.Map[(BigInt, BigInt), DSN] = globals.foldLeft(mutable.Map[(BigInt, BigInt), DSN]()) { + val globalMapping: mutable.Map[(BigInt, BigInt), (DSN, BigInt)] = globals.foldLeft(mutable.Map[(BigInt, BigInt), (DSN, BigInt)]()) { (m, global) => var address: BigInt = global.address if swappedOffsets.contains(address) then address = swappedOffsets(address) - m + ((address, address + global.size) -> DSN(Some(this), 
Some(DataRegion2(global.name, address, global.size)))) + m + ((address, address + global.size/8) -> (DSN(Some(this), Some(DataRegion2(global.name, address, global.size))), 0)) } externalFunctions.foreach( external => var address: BigInt = external.offset if swappedOffsets.contains(address) then address = swappedOffsets(address) - globalMapping.update((address, address), DSN(Some(this), Some(DataRegion2(external.name, address, 0)))) + globalMapping.update((address, address), (DSN(Some(this), Some(DataRegion2(external.name, address, 0))), 0)) ) // determine if an address is a global and return the corresponding global if it is. - def isGlobal(address: BigInt): Option[DSN] = + def isGlobal(address: BigInt): Option[(DSN, BigInt)] = for (elem <- globalMapping) { val range = elem._1 if address >= range._1 && address <= range._2 then @@ -148,16 +115,22 @@ class DSG(val proc: Procedure, } None - private def replaceInEV(oldCell: DSC, newCell: DSC) = + private def replaceInEV(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = varToCell.foreach( (pos, m) => - m.foreach( - (variable, cell) => - if cell.equals(oldCell) then - m.update(variable, newCell) - ) + m.foreach { + case (variable, (cell, offset)) => + if cell.equals(oldCell) then + m.update(variable, (newCell, offset + internalOffsetChange)) + } ) + formals.foreach{ + case (variable, (cell, offset)) => + if cell.equals(oldCell) then + formals.update(variable, (newCell, offset + internalOffsetChange)) + } + private def replaceInPointTo(oldCell: DSC, newCell: DSC) = pointTo.foreach { case (pointer, pointee) => @@ -168,9 +141,11 @@ class DSG(val proc: Procedure, private def replaceInGlobals(oldCell: DSC, newCell: DSC) = if oldCell.node.isDefined then globalMapping.foreach { - case (key, node) => + case (key, tuple) => + val node = tuple._1 + val offset = tuple._2 if node.equals(oldCell.node.get) then - globalMapping.update(key, newCell.node.get) + globalMapping.update(key, (newCell.node.get, offset)) } private def replaceInStack(oldCell: DSC, newCell: DSC) = @@ -181,27 +156,22 @@ class DSG(val proc: Procedure, stackMapping.update(offset, newCell.node.get) } - private def replace(oldCell: DSC, newCell: DSC) = - replaceInEV(oldCell, newCell) + private def replace(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = + replaceInEV(oldCell, newCell, internalOffsetChange) replaceInPointTo(oldCell, newCell) replaceInGlobals(oldCell, newCell) replaceInStack(oldCell, newCell) def getPointee(cell: DSC): DSC = if !pointTo.contains(cell) then - val node = DSN(None, None) + val node = DSN(Some(this), None) pointTo.update(cell, node.cells(0)) pointTo(cell) - -// private def earlyCollapse(node: DSN): Unit = -// node.collapsed = true -// node.cells.clear() -// -// node.addCell(0, 0) + def collapseNode(node: DSN): Unit = - val collapedCell = DSC(Option(node), 0, true) + val collapedCell = DSC(Option(node), 0) val e = DSC(None, 0) val cell = node.cells.foldLeft(e) { @@ -218,7 +188,7 @@ class DSG(val proc: Procedure, node.cells.values.foreach( cell => - replace(cell, collapedCell) + replace(cell, collapedCell, 0) pointTo.foreach { case (pointer, pointee) => if pointer.equals(cell) then @@ -235,126 +205,200 @@ class DSG(val proc: Procedure, if cell.node.isDefined then pointTo.update(node.cells(0), cell) - - def mergeCells(cell1: DSC, cell2: DSC): DSC = - if cell2.node.get.id == 31 then - print("") - if (cell1 == cell2) { - return cell1 + def optionalCollapse(node: DSN): Unit = { + var lastOffset: BigInt = -1 + var lastAccess: BigInt = -1 + val removed 
= mutable.Set[BigInt]() + node.cells.toSeq.sortBy(_._1).foreach { + case (offset: BigInt, cell: DSC) => + if lastOffset + lastAccess > offset then + val result = mergeNeighbours(node.cells(lastOffset), cell) + removed.add(offset) + lastAccess = result.largestAccessedSize + else + lastOffset = offset + lastAccess = cell.largestAccessedSize } - if (incompatibleTypes(cell1, cell2)) then - collapseNode(cell2.node.get) + removed.foreach(node.cells.remove) + } - if cell1.node.isDefined then - cell2.node.get.allocationRegions.addAll(cell1.node.get.allocationRegions) + def mergeNeighbours(cell1: DSC, cell2: DSC): DSC = + require(cell1.node.equals(cell2.node) && cell1.offset < cell2.offset) + if pointTo.contains(cell2) then + if pointTo.contains(cell1) then + mergeCells(getPointee(cell1), getPointee(cell2)) + else + pointTo.update(cell1, getPointee(cell2)) + pointTo.remove(cell2) + val internalOffsetChange = cell2.offset - cell1.offset + replace(cell2, cell1, internalOffsetChange) + cell1.growSize(cell2.offset + cell2.largestAccessedSize) // might cause another collapse + cell1 - if cell2.node.get.collapsed then - if cell1.node.isDefined then - cell1.node.get.cells.foreach { - case (offset, cell) => - if pointTo.contains(cell) then - if pointTo.contains(cell2.node.get.cells(0)) then - mergeCells(getPointee(cell), getPointee(cell2.node.get.cells(0))) - else - pointTo.update(cell2.node.get.cells(0), getPointee(cell)) - pointTo.remove(cell) -// replaceInPointTo(cell, cell2.node.get.cells(0)) -// replaceInEV(cell, cell2.node.get.cells(0)) - replace(cell, cell2.node.get.cells(0)) - } - cell2.node.get.cells(0) - else - if pointTo.contains(cell1) then - if pointTo.contains(cell2.node.get.cells(0)) then - mergeCells(getPointee(cell1), getPointee(cell2.node.get.cells(0))) - else - pointTo.update(cell2.node.get.cells(0), getPointee(cell1)) - pointTo.remove(cell1) -// replaceInPointTo(cell1, cell2.node.get.cells(0)) -// replaceInEV(cell1, cell2.node.get.cells(0)) - replace(cell1, cell2.node.get.cells(0)) - cell2.node.get.cells(0) + def mergeCells(cell1: DSC, cell2: DSC): DSC = + + if cell1.equals(cell2) then + cell1 + else if cell1.node.isDefined && cell1.node.equals(cell2.node) then + collapseNode(cell1.node.get) + cell1.node.get.cells(0) + else if cell1.node.isEmpty then + replace(cell1, cell2, 0) + cell2 + else if cell1.node.get.collapsed || cell2.node.get.collapsed then + val node1 = cell1.node.get + val node2 = cell2.node.get + collapseNode(node1) + collapseNode(node2) + node2.allocationRegions.addAll(node1.allocationRegions) + if node2.region.isEmpty then + node2.region = node1.region + if pointTo.contains(node1.cells(0)) then + if pointTo.contains(node2.cells(0)) then + pointTo.update(node2.cells(0), mergeCells(getPointee(node1.cells(0)), getPointee(node2.cells(0)))) + else + pointTo.update(node2.cells(0), getPointee(node1.cells(0))) + pointTo.remove(node1.cells(0)) + replace(node1.cells(0), node2.cells(0), 0) + node2.cells(0) + else if cell1.node.get.allocationRegions.isEmpty && cell1.offset == 0 && cell1.node.get.cells.size == 1 && cell1.largestAccessedSize == 0 && // + !pointTo.contains(cell1) && pointTo.values.foldLeft(true) { + (condition, cell) => cell != cell1 && condition + } then + replace(cell1, cell2, 0) + cell2 else - cell1.node.get.cells.foreach { - case (offset, cell) => - if pointTo.contains(cell) then - if pointTo.contains(cell2.node.get.cells(offset)) then - mergeCells(getPointee(cell), getPointee(cell2.node.get.cells(offset))) - else - pointTo.update(cell2.node.get.cells(offset), 
getPointee(cell)) - pointTo.remove(cell) -// replaceInPointTo(cell, cell2.node.get.cells(offset)) -// replaceInEV(cell, cell2.node.get.cells(offset)) - replace(cell, cell2.node.get.cells(offset)) + + var delta = cell1.offset - cell2.offset + var node1 = cell1.node.get + var node2 = cell2.node.get + if cell1.offset < cell2.offset then + delta = cell2.offset - cell1.offset + node1 = cell2.node.get + node2 = cell1.node.get + + + val cells : Seq[(BigInt, DSC)] = (node1.cells.toSeq ++ node2.cells.foldLeft(Seq[(BigInt, DSC)]()){ + (s, tuple) => + val offset = tuple._1 + val cell = tuple._2 + s:+ ((offset + delta, cell)) + }).sortBy(_._1) + + var lastOffset: BigInt = -1 + var lastAccess: BigInt = -1 + val resultNode = DSN(Some(this), node1.region) + resultNode.allocationRegions.addAll(node1.allocationRegions ++ node2.allocationRegions) + if node1.region.isDefined then + resultNode.region = node1.region + else if node2.region.isDefined then + resultNode.region = node2.region + if node2.region.get.isInstanceOf[DataRegion2] then + globalMapping.foreach{ + case ((start: BigInt, end: BigInt), (node:DSN, offset: BigInt)) => + if node.equals(node2) then + globalMapping.update((start, end), (node, offset + delta)) + } + val resultCells: mutable.Map[BigInt, (Set[DSC], BigInt)] = mutable.Map() + cells.foreach { + case (offset: BigInt, cell: DSC) => + if (lastOffset + lastAccess > offset) || lastOffset == offset then // includes this cell + if (offset - lastOffset) + cell.largestAccessedSize > lastAccess then + lastAccess = (offset - lastOffset) + cell.largestAccessedSize + resultCells.update(offset, (resultCells(offset)._1 + cell, lastAccess)) + else + lastOffset = offset + lastAccess = cell.largestAccessedSize + resultCells.update(lastOffset, (Set(cell), lastAccess)) } - cell2 - - private def incompatibleTypes(cell1: DSC, cell2: DSC): Boolean = - if cell2.node.get.collapsed then - return false - else if cell1.node.isEmpty || (cell1.collapsedCell && !cell2.collapsedCell) then - return true // TODO not sure about this - else if cell1.offset != cell2.offset then - return true - else if cell1.node.get.cells.size != cell2.node.get.cells.size then - return true - else - (cell1.node.get.cells zip cell2.node.get.cells).foreach { //TODO remove unaccessed cells from type matching/allow unaccessed fields to merge with an accessed field - case ((o1, c1), (o2, c2)) => - if o1 != o2 || !c1.accessedSizes.equals(c2.accessedSizes) then - return true + resultCells.foreach { + case (offset: BigInt, (cells: Set[DSC], largestAccess: BigInt)) => + val collapsedCell = resultNode.addCell(offset, largestAccess)._1 + val outgoing: Set[DSC] = cells.foldLeft(Set()){ + (set, cell) => + // replace incoming edges + if cell.node.get.equals(node2) then + replace(cell, collapsedCell, delta + cell.offset - offset) // TODO reconsider offsets + else + assert(cell.node.get.equals(node1)) + replace(cell, collapsedCell, cell.offset - offset) + + // collect outgoing edges + if pointTo.contains(cell) then + val pointee = getPointee(cell) + pointTo.remove(cell) + set + pointee + else + set + } + // replace outgoing edges TODO might have to move this out after all cells have been processed + if outgoing.size == 1 then + pointTo.update(collapsedCell, outgoing.head) + else if outgoing.size > 1 then + val result = outgoing.tail.foldLeft(outgoing.head){ + (result, cell) => + mergeCells(result, cell) + } + pointTo.update(collapsedCell, result) } - false + + if cell1.offset >= cell2.offset then + resultNode.cells(cell1.offset) + else + 
resultNode.cells(cell2.offset) private def isFormal(pos: CFGPosition, variable: Variable): Boolean = variable != stackPointer && !reachingDefs(pos).contains(variable) - val formals: mutable.Map[Variable, DSC] = mutable.Map() - val varToCell: Map[CFGPosition, mutable.Map[Variable, DSC]] = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).foldLeft(Map[CFGPosition, mutable.Map[Variable, DSC]]()) { + def unwrapPaddingAndSlicing(expr: Expr): Expr = + expr match + case Extract(end, start, body) if start == 0 && end == 32 => unwrapPaddingAndSlicing(body) + case ZeroExtend(extension, body) => unwrapPaddingAndSlicing(body) + case _ => expr + + val formals: mutable.Map[Variable, (DSC, BigInt)] = mutable.Map() + val varToCell: Map[CFGPosition, mutable.Map[Variable, (DSC, BigInt)]] = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).foldLeft(Map[CFGPosition, mutable.Map[Variable, (DSC, BigInt)]]()) { (m, pos) => pos match case LocalAssign(variable, value , label) => - if pos.asInstanceOf[LocalAssign].label.get.startsWith("%0000044f") then - print("") value.variables.foreach( v => if isFormal(pos, v) then val node = DSN(Some(this), None) node.rep = "formal" nodes.add(node) - formals.update(v, node.cells(0)) + formals.update(v, (node.cells(0), 0)) ) val node = DSN(Some(this), None) node.rep = "ssa" - m + (pos -> mutable.Map(variable -> node.cells(0))) + m + (pos -> mutable.Map(variable -> (node.cells(0), 0))) case DirectCall(proc, target, label) if proc.name == "malloc" => val node = DSN(Some(this), None) node.rep = "ssa" - m + (pos -> mutable.Map(mallocRegister -> node.cells(0))) + m + (pos -> mutable.Map(mallocRegister -> (node.cells(0), 0))) case DirectCall(proc, target, label) if writesTo.contains(proc) => - val result: Map[Variable, DSC] = writesTo(proc).foldLeft(Map[Variable, DSC]()){ + val result: Map[Variable, (DSC, BigInt)] = writesTo(proc).foldLeft(Map[Variable, (DSC, BigInt)]()){ (n, variable) => val node = DSN(Some(this), None) node.rep = "ssa" - n + (variable -> node.cells(0)) + n + (variable -> (node.cells(0), 0)) } m + (pos -> result.to(mutable.Map)) - case MemoryAssign(memory, MemoryStore(mem, index, value: Variable, endian, size), label) => + case MemoryAssign(memory, MemoryStore(mem, index, expr: Expr, endian, size), label) if unwrapPaddingAndSlicing(expr).isInstanceOf[Variable] => + val value: Variable = unwrapPaddingAndSlicing(expr).asInstanceOf[Variable] if isFormal(pos, value) then val node = DSN(Some(this), None) node.rep = "formal" nodes.add(node) - formals.update(value, node.cells(0)) + formals.update(value, (node.cells(0), 0)) m case _ => m } - - def addNode(memoryRegion2: MemoryRegion2, offset: BigInt, size: Int): DSN = ??? 
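+  // Worked example for the offset-aware merge above (hypothetical cells, for illustration):
+  //   suppose n1 has a cell at offset 8 and n2 has cells at offsets 0 and 16, and we call
+  //   mergeCells(n1.cells(8), n2.cells(0)). The offsets differ, so delta = 8 and n2 is the
+  //   node whose cells are shifted: they land at offsets 8 and 24 of the result node, the
+  //   shifted cell at offset 8 is unified with n1's cell (keeping the larger accessed size),
+  //   in-edges are redirected via replace(...) with the adjusted internal offsets, any
+  //   recorded pointees of the unified cells are merged into a single target, and the cell
+  //   returned is resultNode.cells(8).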
} @@ -362,9 +406,6 @@ class DSN(val graph: Option[DSG], var region: Option[MemoryRegion2]) { val id: Int = NodeCounter.getCounter - if id == 31 then - print("") - var collapsed = false val allocationRegions: mutable.Set[MemoryRegion2] = region match @@ -388,16 +429,24 @@ class DSN(val graph: Option[DSG], var region: Option[MemoryRegion2]) { if newSize > size then size = newSize - def addCell(offset: BigInt, size: Int) = + + def addCell(offset: BigInt, size: BigInt) : (DSC, BigInt) = this.updateSize(offset + size) if !cells.contains(offset) then + cells.foreach{ + case (start:BigInt, cell:DSC) => + if start < offset && offset < (start + cell.largestAccessedSize) then + val internalOffset = offset - start + cell.growSize(internalOffset + size) + return (cell, internalOffset) + } val cell = DSC(Some(this), offset) cells.update(offset, cell) - cell.addAccessedSize(size) + cell.growSize(size) + (cell, 0) else - cells(offset).addAccessedSize(size) - if cells(offset).accessedSizes.size > 1 then - graph.get.collapseNode(this) + cells(offset).growSize(size) + (cells(offset), 0) override def equals(obj: Any): Boolean = @@ -409,32 +458,20 @@ class DSN(val graph: Option[DSG], var region: Option[MemoryRegion2]) { override def toString: String = s"Node($id, $allocationRegions ${if collapsed then ", collapsed" else ""})" } -case class DSC(node: Option[DSN], offset: BigInt, collapsedCell: Boolean = false) +case class DSC(node: Option[DSN], offset: BigInt) { - val accessedSizes: mutable.Set[Int] = mutable.Set() - def addAccessedSize(size: Int): Unit = - if size != 0 then accessedSizes.add(size) + var largestAccessedSize: BigInt = 0 - override def toString: String = s"Cell(${if node.isDefined then node.get.toString else "NONE"}, $offset)" -} + def growSize(size: BigInt): Boolean = + if size > largestAccessedSize then + largestAccessedSize = size + true + else false -class SimulationMapper -{ + override def toString: String = s"Cell(${if node.isDefined then node.get.toString else "NONE"}, $offset)" } -class Field {} -class Offset -{} - -class Alloc -{} - -class CallSite -{ - -} - diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala index 4789395ef..cc4dc6503 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/Local.scala @@ -21,6 +21,7 @@ class Local( val mallocRegister = Register("R0", BitVecType(64)) val stackPointer = Register("R31", BitVecType(64)) + private val visited: mutable.Set[CFGPosition] = mutable.Set() val varToSym: Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]] = symResults.foldLeft(Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]]()) { @@ -102,36 +103,83 @@ class Local( def isGlobal(expr: Expr, pos: CFGPosition, size: Int = 0): Option[DSC] = if evaluateExpression(expr, constProp(pos)).isDefined && graph.isGlobal(evaluateExpression(expr, constProp(pos)).get.value).isDefined then val address = evaluateExpression(expr, constProp(pos)).get.value - val node: DSN = graph.isGlobal(evaluateExpression(expr, constProp(pos)).get.value).get + val (node: DSN, internal: BigInt) = graph.isGlobal(evaluateExpression(expr, constProp(pos)).get.value).get val baseAddress = node.region.get.asInstanceOf[DataRegion2].start val offset = address - baseAddress - node.addCell(offset, size) + node.addCell(internal + offset, size) + graph.optionalCollapse(node) if node.collapsed then Some(node.cells(0)) else - Some(node.cells(offset)) + Some(node.addCell(internal + offset, 0)._1) else None - def getCells(pos: CFGPosition, arg: Variable): Set[DSC] = + 
def getCells(pos: CFGPosition, arg: Variable): Set[(DSC, BigInt)] = if reachingDefs(pos).contains(arg) then - reachingDefs(pos)(arg).foldLeft(Set[DSC]()) { + reachingDefs(pos)(arg).foldLeft(Set[(DSC, BigInt)]()) { (s, defintion) => s + graph.varToCell(defintion)(arg) } else Set(graph.formals(arg)) - def getNodes(pos: CFGPosition, arg: Variable): Set[DSN] = - if reachingDefs(pos).contains(arg) then - reachingDefs(pos)(arg).foldLeft(Set[DSN]()){ - (s, definition) => - s + graph.varToCell(definition)(arg).node.get - } - else - Set(graph.formals(arg).node.get) + // this function is used to ignore slicing and padding between 32 bit and 64 bit values + // this can introduce unsoundness + def unwrapPaddingAndSlicing(expr: Expr): Expr = + expr match + case Extract(end, start, body) if start == 0 && end == 32 => unwrapPaddingAndSlicing(body) + case ZeroExtend(extension, body) => unwrapPaddingAndSlicing(body) + case _ => expr + + + + /** + * Handles unification for instructions of the form R_x = R_y [+ offset] where R_y is a pointer and [+ offset] is optional + * @param position the cfg position being visited (note this might be a local assign of the form R_x = R_y [+ offset] + * or it might be memory load/store where the index is of the form R_y [+ offset] + * @param lhs Ev(R_x) if position is local assign or a cell from an empty node if R_y [+ offset] is the index of a memoryAssign + * @param rhs R_y, reachingDefs(position)(R_y) can be used to find the set of SSA variables that may define R_x + * @param pointee if false, the position is local pointer arithmetic therefore Ev(R_y [+ offset]) is merged with lhs + * else, the position is a memory read/write therefore E(Ev(R_y [+ offset])) is merged with lhs + * @param offset offset if [+ offset] is present + * @return the cell resulting from the unification + */ + private def visitPointerArithmeticOperation(position: CFGPosition, lhs: DSC, rhs: Variable, size: Int, pointee: Boolean = false, offset: BigInt = 0, collapse: Boolean = false) : DSC = + // visit all the defining pointer operation on rhs variable first + reachingDefs(position)(rhs).foreach(visit) + // get the cells of all the SSA variables in the set + val cells: Set[(DSC, BigInt)] = getCells(position, rhs) + // merge the cells or their pointees with lhs + cells.foldLeft(lhs) { + (c, t) => + val cell = t._1 + val internalOffset = t._2 + if offset != 0 then // it's R_x = R_y + offset + val node = cell.node.get // get the node of R_y + var field = offset + cell.offset + internalOffset // calculate the total offset + node.addCell(field, size) // add cell there if doesn't already exists + graph.optionalCollapse(node) + if node.collapsed then + field = 0 + graph.mergeCells(c, if pointee then graph.getPointee(node.addCell(field, 0)._1) else node.addCell(field, 0)._1) + else + if collapse then + val node = cell.node.get + graph.collapseNode(node) + graph.mergeCells(c, if pointee then graph.getPointee(node.cells(0)) else node.cells(0)) + else + cell.node.get.addCell(cell.offset, size) //update the size of the cell + graph.optionalCollapse(cell.node.get) + graph.mergeCells(c, if pointee then graph.getPointee(cell.node.get.addCell(cell.offset, 0)._1) else cell.node.get.addCell(cell.offset, 0)._1) + } + def visit(n: CFGPosition): Unit = { + if visited.contains(n) then + return + else + visited.add(n) n match case DirectCall(proc, target, label) if proc.name == "malloc" => val size: BigInt = evaluateExpression(mallocRegister, constProp(n)) match @@ -139,23 +187,17 @@ class Local( case None => 0 val node 
= DSN(Some(graph), Some(HeapRegion2(nextMallocCount, proc, size))) graph.nodes.add(node) - val cell = graph.mergeCells(graph.varToCell(n)(mallocRegister), node.cells(0)) - graph.varToCell(n).update(mallocRegister, cell) - - case LocalAssign(variable, expr, maybeString) => - val lhsCell = graph.varToCell(n)(variable) - if maybeString.get.startsWith("%0000031f") then - print("") - if maybeString.get.startsWith("%00000325") then - print("") + graph.mergeCells(graph.varToCell(n)(mallocRegister)._1, node.cells(0)) + + case LocalAssign(variable, rhs, maybeString) => + val expr: Expr = unwrapPaddingAndSlicing(rhs) + val lhsCell = graph.varToCell(n)(variable)._1 if isGlobal(expr, n).isDefined then val global = isGlobal(expr, n).get - val result = graph.mergeCells(lhsCell, global) - graph.varToCell(n).update(variable, result) + graph.mergeCells(lhsCell, global) else if isStack(expr, n).isDefined then // just in case stack can't be recognised in after this assignment val stack = isStack(expr, n).get - val result = graph.mergeCells(lhsCell, stack) - graph.varToCell(n).update(variable, result) + graph.mergeCells(lhsCell, stack) else expr match case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) && arg1.equals(stackPointer) @@ -163,148 +205,90 @@ class Local( val size = twosComplementToDec(decToBinary(evaluateExpression(arg2, constProp(n)).get.value)) val node = DSN(Some(graph), Some(StackRegion2("Stack_"+proc.name, proc, -size))) graph.nodes.add(node) - val cell = graph.mergeCells(lhsCell, node.cells(0)) - graph.varToCell(n).update(variable, cell) - case BinaryExpr(op, arg1: Variable, arg2) if varToSym.contains(n) && varToSym(n).contains(arg1) && evaluateExpression(arg2, constProp(n)).isDefined => - val offset = evaluateExpression(arg2, constProp(n)).get.value - val nodes: Set[DSN] = getNodes(n, arg1) - nodes.foreach(_.addCell(offset, 0)) - val cell = nodes.foldLeft(lhsCell){ - (c, node) => - var field = offset - node.addCell(offset, 0) - if node.collapsed then - field = 0 - graph.mergeCells(c, node.cells(field)) // TODO this causing everything to collapse - } - graph.varToCell(n).update(variable, cell) + graph.mergeCells(lhsCell, node.cells(0)) - case arg: Variable if varToSym(n).contains(arg) => - val cells = getCells(n, arg) + case BinaryExpr(op, arg1: Variable, arg2) if /*varToSym.contains(n) && varToSym(n).contains(arg1) && */ evaluateExpression(arg2, constProp(n)).isDefined => + val offset = evaluateExpression(arg2, constProp(n)).get.value + visitPointerArithmeticOperation(n, lhsCell, arg1, 0, false, offset) - val cell = cells.foldLeft(lhsCell){ - (c, p) => - graph.mergeCells(c, p) // TODO this causing everything to collapse - } - graph.varToCell(n).update(variable, cell) + case arg: Variable /*if varToSym.contains(n) && varToSym(n).contains(arg)*/ => + visitPointerArithmeticOperation(n, lhsCell, arg, 0) case MemoryLoad(mem, index, endian, size) => val byteSize = (size.toDouble/8).ceil.toInt if isGlobal(index, n, byteSize).isDefined then val global = isGlobal(index, n, byteSize).get - val result = graph.mergeCells(lhsCell, graph.getPointee(global)) - graph.varToCell(n).update(variable, result) + graph.mergeCells(lhsCell, graph.getPointee(global)) else if isStack(index, n).isDefined then val stack = isStack(index, n).get - val result = graph.mergeCells(lhsCell, graph.getPointee(stack)) - graph.varToCell(n).update(variable, result) + graph.mergeCells(lhsCell, graph.getPointee(stack)) else index match case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, 
constProp(n)).isDefined => - assert(varToSym(n).contains(arg1)) +// assert(varToSym(n).contains(arg1)) val offset = evaluateExpression(arg2, constProp(n)).get.value - val nodes: Set[DSN] = getNodes(n, arg1) - nodes.foreach(_.addCell(offset, byteSize)) - val cell = nodes.foldLeft(lhsCell){ - (c, node) => - var field = offset - node.addCell(offset, byteSize) - if node.collapsed then - field = 0 - graph.mergeCells(c, graph.getPointee(node.cells(field))) // TODO this causing everything to collapse - } - graph.varToCell(n).update(variable, cell) + visitPointerArithmeticOperation(n, lhsCell, arg1, byteSize, true, offset) + case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isEmpty=> +// assert(varToSym(n).contains(arg1)) + visitPointerArithmeticOperation(n, lhsCell, arg1, byteSize, true, 0, true) case arg: Variable => - assert(varToSym(n).contains(arg)) - val cells: Set[DSC] = getCells(n, arg) - - val cell = cells.foldLeft(lhsCell){ - (c, p) => - graph.mergeCells(c, graph.getPointee(p)) // TODO this causing everything to collapse - } - graph.varToCell(n).update(variable, cell) +// assert(varToSym(n).contains(arg)) + visitPointerArithmeticOperation(n, lhsCell, arg, byteSize, true) case _ => ??? case _ => + var containsPointer = false breakable { - var containsPointer = false for (v <- expr.variables) { if varToSym.contains(n) && varToSym(n).contains(v) then containsPointer = true break } - if containsPointer then - val cell = expr.variables.foldLeft(lhsCell) { - (c, v) => - val cells: Set[DSC] = getCells(n, v) - - cells.foldLeft(c) { - (c, p) => - graph.mergeCells(c, p) // TODO this causing everything to collapse - } - } - val node = cell.node.get - graph.collapseNode(node) - graph.varToCell(n).update(variable, node.cells(0)) } - case MemoryAssign(memory, MemoryStore(mem, index, value: Variable, endian, size), label) => - if n.isInstanceOf[MemoryAssign] && n.asInstanceOf[MemoryAssign].label.get.startsWith("%00000318") then - print("") + if containsPointer then + val cell = expr.variables.foldLeft(lhsCell) { + (c, v) => + val cells: Set[(DSC, BigInt)] = getCells(n, v) + + cells.foldLeft(c) { + (c, p) => + graph.mergeCells(c, p._1) + } + } + val node = cell.node.get + graph.collapseNode(node) + + case MemoryAssign(memory, MemoryStore(mem, index, expr: Expr, endian, size), label) if unwrapPaddingAndSlicing(expr).isInstanceOf[Variable] => // if value is a literal ignore it + val value: Variable = unwrapPaddingAndSlicing(expr).asInstanceOf[Variable] val byteSize = (size.toDouble/8).ceil.toInt - val addressCell: DSC = + val addressPointee: DSC = if isGlobal(index, n, byteSize).isDefined then - isGlobal(index, n, byteSize).get + graph.getPointee(isGlobal(index, n, byteSize).get) else if isStack(index, n).isDefined then - isStack(index, n).get + graph.getPointee(isStack(index, n).get) else index match case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => - assert(varToSym(n).contains(arg1)) +// assert(varToSym(n).contains(arg1)) val offset = evaluateExpression(arg2, constProp(n)).get.value - val nodes: Set[DSN] = getNodes(n, arg1) - nodes.foreach(_.addCell(offset, byteSize)) - val cell = nodes.foldLeft(DSN(Some(graph), None).cells(0)) { - (c, node) => - var field = offset - node.addCell(offset, byteSize) - if node.collapsed then - field = 0 - graph.mergeCells(c, node.cells(field)) // TODO this causing everything to collapse - } - cell + visitPointerArithmeticOperation(n, DSN(Some(graph), None).cells(0), arg1, byteSize, true, 
offset) case arg: Variable => - assert(varToSym(n).contains(arg)) - val cells: Set[DSC] = getCells(n, arg) - val cell = cells.foldLeft(DSN(Some(graph), None).cells(0)) { - (c, p) => - graph.mergeCells(c, p) // TODO this causing everything to collapse - } - cell +// assert(varToSym(n).contains(arg)) + visitPointerArithmeticOperation(n, DSN(Some(graph), None).cells(0), arg, byteSize, true) case _ => ??? val valueCells = getCells(n, value) - val result = valueCells.foldLeft(graph.getPointee(addressCell)) { + val result = valueCells.foldLeft(addressPointee) { (c, p) => - graph.mergeCells(p, c) + graph.mergeCells(p._1, c) } - if reachingDefs(n).contains(value) then - reachingDefs(n)(value).foreach ( - definition => - graph.varToCell(definition).update(value, result) - ) - else - graph.formals.update(value, result) - case _ => } - def analyze(): Any = - val domain = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).reverse - - + def analyze(): DSG = + val domain = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString) domain.foreach(visit) - println(graph.formals) val results = graph.varToCell.keys.toSeq.sortBy(_.toShortString) results.foreach { @@ -317,4 +301,5 @@ class Local( } } println(graph.pointTo) + graph } diff --git a/src/main/scala/analysis/ReachingDefs.scala b/src/main/scala/analysis/ReachingDefs.scala index e1ef02488..ff330f1ee 100644 --- a/src/main/scala/analysis/ReachingDefs.scala +++ b/src/main/scala/analysis/ReachingDefs.scala @@ -10,8 +10,6 @@ abstract class ReachingDefs(program: Program, writesTo: Map[Procedure, Set[Regis val lattice: MapLattice[CFGPosition, Map[Variable, Set[CFGPosition]], MapLattice[Variable, Set[CFGPosition], PowersetLattice[CFGPosition]]] = new MapLattice(new MapLattice(new PowersetLattice[CFGPosition]())) def transfer(n: CFGPosition, s: Map[Variable, Set[CFGPosition]]): Map[Variable, Set[CFGPosition]] = - if n.isInstanceOf[LocalAssign] && n.asInstanceOf[LocalAssign].label.get.startsWith("%000004f4") then - print("") n match case loc:LocalAssign => s + (loc.lhs -> Set(n)) diff --git a/src/main/scala/analysis/SymbolicAccessAnalysis.scala b/src/main/scala/analysis/SymbolicAccessAnalysis.scala index 247862594..d412c2f5f 100644 --- a/src/main/scala/analysis/SymbolicAccessAnalysis.scala +++ b/src/main/scala/analysis/SymbolicAccessAnalysis.scala @@ -2,7 +2,7 @@ package analysis import analysis.solvers.ForwardIDESolver import ir.IRWalk.procedure -import ir.{BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Extract, GoTo, IndirectCall, Literal, LocalAssign, Memory, MemoryLoad, MemoryStore, Procedure, Program, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend} +import ir.{BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, GoTo, IndirectCall, Literal, LocalAssign, Memory, MemoryLoad, MemoryStore, Procedure, Program, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend} import java.math.BigInteger @@ -81,6 +81,12 @@ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElem result } + def unwrapPaddingAndSlicing(expr: Expr): Expr = + expr match + case Extract(end, start, body) if start == 0 && end == 32 => unwrapPaddingAndSlicing(body) + case ZeroExtend(extension, body) => unwrapPaddingAndSlicing(body) + case _ => expr + def edgesCallToEntry(call: DirectCall, entry: Procedure)(d: DL): Map[DL, EdgeFunction[TwoElement]] = d match case Left(value) => @@ -112,7 +118,8 @@ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, 
Map[Variable, FlatElem val bitvecnegative: BigInt = new BigInt(new BigInteger("9223372036854775808")) // negative 64 bit integer n match - case LocalAssign(variable, expr, maybeString) => + case LocalAssign(variable, rhs, maybeString) => + val expr = unwrapPaddingAndSlicing(rhs) expr match case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) && arg1.equals(stackPointer) && evaluateExpression(arg2, constProp(n)).isDefined && evaluateExpression(arg2, constProp(n)).get.value >= bitvecnegative => diff --git a/src/main/scala/analysis/WriteToAnalysis.scala b/src/main/scala/analysis/WriteToAnalysis.scala index b1a58ad24..be40f7269 100644 --- a/src/main/scala/analysis/WriteToAnalysis.scala +++ b/src/main/scala/analysis/WriteToAnalysis.scala @@ -45,6 +45,6 @@ class WriteToAnalysis(program: Program) extends Analysis[Map[Procedure, Set[Regi } def analyze(): Map[Procedure, Set[Register]] = - program.procedures.foreach(proc => getWritesTos) + program.procedures.foreach(getWritesTos) writesTo.toMap } diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index da353ac21..2420557bd 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -28,7 +28,6 @@ import util.Logger import java.util.Base64 import spray.json.DefaultJsonProtocol.* import util.intrusive_list.IntrusiveList -import analysis.CfgCommandNode import scala.annotation.tailrec import scala.collection.mutable @@ -60,7 +59,9 @@ case class StaticAnalysisContext( steensgaardResults: Map[RegisterVariableWrapper, Set[RegisterVariableWrapper | MemoryRegion]], mmmResults: MemoryModelMap, memoryRegionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])] + reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], + symbolicAccessess: Map[CFGPosition, Map[SymbolicAccess, TwoElement]], + dsg: Option[DSG] ) /** Results of the main program execution. @@ -708,7 +709,9 @@ object StaticAnalysis { steensgaardResults = steensgaardResults, mmmResults = mmm, memoryRegionContents = memoryRegionContents, - reachingDefs = reachingDefinitionsAnalysisResults + reachingDefs = reachingDefinitionsAnalysisResults, + symbolicAccessess = symResults, + dsg = None, ) } @@ -948,8 +951,31 @@ object RunUtils { writeToFile(newCFG.toDot(x => x.toString, Output.dotIder), s"${s}_resolvedCFG.dot") } + Logger.info("[!] Running Region Builder") + val writesTo = WriteToAnalysis(ctx.program).analyze() + val reachingDefs = ReachingDefsAnalysis(ctx.program, writesTo).analyze() + config.analysisDotPath.foreach( + s => + writeToFile(toDot(ctx.program), s"${s}_ct.dot") + ) + val b = Local(ctx.program.mainProcedure, analysisResult.last.symbolicAccessess, analysisResult.last.IRconstPropResult, ctx.globals, ctx.globalOffsets, ctx.externalFunctions, reachingDefs, writesTo).analyze() + Logger.info(s"[!] 
Finished indirect call resolution after $iteration iterations") - analysisResult.last + StaticAnalysisContext( + cfg = analysisResult.last.cfg, + constPropResult = analysisResult.last.constPropResult, + IRconstPropResult = analysisResult.last.IRconstPropResult, + memoryRegionResult = analysisResult.last.memoryRegionResult, + vsaResult = analysisResult.last.vsaResult, + interLiveVarsResults = analysisResult.last.interLiveVarsResults, + paramResults = analysisResult.last.paramResults, + steensgaardResults = analysisResult.last.steensgaardResults, + mmmResults = analysisResult.last.mmmResults, + memoryRegionContents = analysisResult.last.memoryRegionContents, + symbolicAccessess = analysisResult.last.symbolicAccessess, + dsg = Some(b), + reachingDefs = analysisResult.last.reachingDefs + ) } } diff --git a/src/test/scala/LocalTest.scala b/src/test/scala/LocalTest.scala new file mode 100644 index 000000000..a78591e87 --- /dev/null +++ b/src/test/scala/LocalTest.scala @@ -0,0 +1,56 @@ +import ir.Endian.BigEndian +import ir.{BVADD, BinaryExpr, BitVecLiteral, ConvertToSingleProcedureReturn, DirectCall, LocalAssign, Memory, MemoryAssign, MemoryLoad, MemoryStore} +import org.scalatest.funsuite.AnyFunSuite +import test_util.TestUtil +import ir.dsl.* +import specification.Specification +import util.{IRContext, RunUtils, StaticAnalysisConfig} + +class LocalTest extends AnyFunSuite, TestUtil { + test("internal merge") { + val mem = Memory("mem", 10000, 10000) + var program = prog( + proc("main", + block("operations", +// LocalAssign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), + LocalAssign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")), + LocalAssign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")), + MemoryAssign(mem, MemoryStore(mem, R7, R1, BigEndian, 64), Some("00003")), + MemoryAssign(mem, MemoryStore(mem, R6, R2, BigEndian, 64), Some("00004")), + ret + ) + ) + ) + + val returnUnifier = ConvertToSingleProcedureReturn() + program = returnUnifier.visitProgram(program) + + val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Map.empty, Specification(Set(), Map(), List(), List(), List(), Set()), program)) + + + } + + test("offseting from middle of cell") { + val mem = Memory("mem", 10000, 10000) + var program = prog( + proc("main", + block("operations", + // LocalAssign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), + LocalAssign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")), + LocalAssign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")), + MemoryAssign(mem, MemoryStore(mem, R7, R1, BigEndian, 64), Some("00003")), + MemoryAssign(mem, MemoryStore(mem, R6, R2, BigEndian, 64), Some("00004")), + LocalAssign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(16, 64)), Some("00005")), // TODO check with 8 + ret + ) + ) + ) + + val returnUnifier = ConvertToSingleProcedureReturn() + program = returnUnifier.visitProgram(program) + + val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Map.empty, Specification(Set(), Map(), List(), List(), List(), Set()), program)) + + + } +} From 87724520e102bb11596fff62ff78c06b32397d64 Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Fri, 10 May 2024 09:23:49 +1000 Subject: [PATCH 008/104] tests --- src/main/scala/analysis/DSAUtility.scala | 53 ++++++---- src/main/scala/analysis/Local.scala | 40 ++++++-- src/test/scala/LocalTest.scala | 123 +++++++++++++++++++++-- 3 files changed, 180 
insertions(+), 36 deletions(-) diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index 880b36de2..88459cde0 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -232,7 +232,7 @@ class DSG(val proc: Procedure, pointTo.remove(cell2) val internalOffsetChange = cell2.offset - cell1.offset replace(cell2, cell1, internalOffsetChange) - cell1.growSize(cell2.offset + cell2.largestAccessedSize) // might cause another collapse + cell1.growSize((cell2.offset - cell1.offset) + cell2.largestAccessedSize) // might cause another collapse cell1 @@ -315,7 +315,7 @@ class DSG(val proc: Procedure, resultCells.foreach { case (offset: BigInt, (cells: Set[DSC], largestAccess: BigInt)) => - val collapsedCell = resultNode.addCell(offset, largestAccess)._1 + val collapsedCell = resultNode.addCell(offset, largestAccess) val outgoing: Set[DSC] = cells.foldLeft(Set()){ (set, cell) => // replace incoming edges @@ -398,13 +398,9 @@ class DSG(val proc: Procedure, case _ => m } - - def addNode(memoryRegion2: MemoryRegion2, offset: BigInt, size: Int): DSN = ??? } -class DSN(val graph: Option[DSG], var region: Option[MemoryRegion2]) { - - val id: Int = NodeCounter.getCounter +class DSN(val graph: Option[DSG], var region: Option[MemoryRegion2], val id: Int = NodeCounter.getCounter) { var collapsed = false @@ -430,23 +426,43 @@ class DSN(val graph: Option[DSG], var region: Option[MemoryRegion2]) { if newSize > size then size = newSize - def addCell(offset: BigInt, size: BigInt) : (DSC, BigInt) = - this.updateSize(offset + size) - if !cells.contains(offset) then - cells.foreach{ - case (start:BigInt, cell:DSC) => + def getCell(offset: BigInt): DSC = + if collapsed then + cells(0) + else if !cells.contains(offset) then + var result: Option[DSC] = None + cells.foreach { + case (start: BigInt, cell: DSC) => if start < offset && offset < (start + cell.largestAccessedSize) then - val internalOffset = offset - start - cell.growSize(internalOffset + size) - return (cell, internalOffset) + result = Some(cell) } + result match + case Some(value) => value + case None => + ??? 
+ else + cells(offset) + + + def addCell(offset: BigInt, size: BigInt) : DSC = + this.updateSize(offset + size) + if collapsed then + cells(0) + else if !cells.contains(offset) then +// cells.foreach{ +// case (start:BigInt, cell:DSC) => +// if start < offset && offset < (start + cell.largestAccessedSize) then +// val internalOffset = offset - start +// cell.growSize(internalOffset + size) +// return (cell, internalOffset) +// } val cell = DSC(Some(this), offset) cells.update(offset, cell) cell.growSize(size) - (cell, 0) + cell else cells(offset).growSize(size) - (cells(offset), 0) + cells(offset) override def equals(obj: Any): Boolean = @@ -455,6 +471,8 @@ class DSN(val graph: Option[DSG], var region: Option[MemoryRegion2]) { this.id == node.id case _ => false + override def hashCode(): Int = id + override def toString: String = s"Node($id, $allocationRegions ${if collapsed then ", collapsed" else ""})" } @@ -469,7 +487,6 @@ case class DSC(node: Option[DSN], offset: BigInt) true else false - override def toString: String = s"Cell(${if node.isDefined then node.get.toString else "NONE"}, $offset)" } diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala index cc4dc6503..9bc44acbf 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/Local.scala @@ -111,7 +111,7 @@ class Local( if node.collapsed then Some(node.cells(0)) else - Some(node.addCell(internal + offset, 0)._1) + Some(node.getCell(internal + offset)) else None @@ -151,28 +151,46 @@ class Local( // get the cells of all the SSA variables in the set val cells: Set[(DSC, BigInt)] = getCells(position, rhs) // merge the cells or their pointees with lhs - cells.foldLeft(lhs) { + var result = cells.foldLeft(lhs) { (c, t) => val cell = t._1 val internalOffset = t._2 - if offset != 0 then // it's R_x = R_y + offset + if !collapse then // offset != 0 then // it's R_x = R_y + offset val node = cell.node.get // get the node of R_y var field = offset + cell.offset + internalOffset // calculate the total offset node.addCell(field, size) // add cell there if doesn't already exists - graph.optionalCollapse(node) +// graph.optionalCollapse(node) if node.collapsed then field = 0 - graph.mergeCells(c, if pointee then graph.getPointee(node.addCell(field, 0)._1) else node.addCell(field, 0)._1) + graph.mergeCells(c, if pointee then graph.getPointee(node.getCell(field)) else node.getCell(field)) else - if collapse then +// if collapse then val node = cell.node.get graph.collapseNode(node) graph.mergeCells(c, if pointee then graph.getPointee(node.cells(0)) else node.cells(0)) - else - cell.node.get.addCell(cell.offset, size) //update the size of the cell - graph.optionalCollapse(cell.node.get) - graph.mergeCells(c, if pointee then graph.getPointee(cell.node.get.addCell(cell.offset, 0)._1) else cell.node.get.addCell(cell.offset, 0)._1) +// else +// cell.node.get.addCell(cell.offset + internalOffset, size) //update the size of the cell +//// graph.optionalCollapse(cell.node.get) +// graph.mergeCells(c, if pointee then graph.getPointee(cell.node.get.getCell(cell.offset + internalOffset)) else cell.node.get.getCell(cell.offset + internalOffset)) } + if pointee then + cells.foreach( + t => + val offset = t._1.offset + val internalOffset = t._2 + val node = t._1.node.get + val cell = node.getCell(offset + internalOffset) + if graph.pointTo.contains(cell) && graph.pointTo(cell).equals(result) then + graph.optionalCollapse(node) + assert(graph.pointTo.contains(node.getCell(offset))) + result = 
graph.getPointee(node.getCell(offset)) + else + graph.optionalCollapse(node) + ) + val resultOffset = result.offset + graph.optionalCollapse(result.node.get) + result.node.get.getCell(result.offset) + def visit(n: CFGPosition): Unit = { @@ -285,7 +303,7 @@ class Local( case _ => } def analyze(): DSG = - val domain = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString) + val domain = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).reverse domain.foreach(visit) diff --git a/src/test/scala/LocalTest.scala b/src/test/scala/LocalTest.scala index a78591e87..9ca42aad4 100644 --- a/src/test/scala/LocalTest.scala +++ b/src/test/scala/LocalTest.scala @@ -1,20 +1,55 @@ +import analysis.{DSC, DSG, DSN} import ir.Endian.BigEndian import ir.{BVADD, BinaryExpr, BitVecLiteral, ConvertToSingleProcedureReturn, DirectCall, LocalAssign, Memory, MemoryAssign, MemoryLoad, MemoryStore} import org.scalatest.funsuite.AnyFunSuite import test_util.TestUtil import ir.dsl.* import specification.Specification -import util.{IRContext, RunUtils, StaticAnalysisConfig} +import util.{BASILConfig, BoogieGeneratorConfig, ILLoadingConfig, IRContext, RunUtils, StaticAnalysisConfig} class LocalTest extends AnyFunSuite, TestUtil { + + test("basic pointer") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/basicpointer/basicpointer.adt", + relfFile = "examples/basicpointer/basicpointer.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val dsg = results.analysis.get.dsg.get + assert(dsg.pointTo.size == 9) + val framePointer = DSC(Some(DSN(None, None, 1)), 0) // R31 + assert(dsg.pointTo(framePointer).equals(dsg.formals(R29)._1)) + val stack8 = DSC(Some(DSN(None, None, 2)), 0) // R31 + 8 + assert(dsg.pointTo(stack8).equals(dsg.formals(R30)._1)) + val stack40 = DSC(Some(DSN(None, None, 3)), 0) // R31 + 40 + val stack32 = DSC(Some(DSN(None, None, 5)), 0) // R31 + 32 + val stack24 = dsg.pointTo(stack32) // R31 + 24 and Malloc + assert(stack24.node.get.collapsed) + assert(dsg.pointTo(stack24).equals(stack24)) + assert(dsg.pointTo(stack40).equals(dsg.getPointee(dsg.getPointee(DSC(Some(DSN(None, None, 12)), 0))))) + +// assert(dsg.pointTo.contains(framePointer)) + } + + test("internal merge") { val mem = Memory("mem", 10000, 10000) + val locAssign1 = LocalAssign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) + val locAssign2 = LocalAssign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) var program = prog( proc("main", block("operations", // LocalAssign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), - LocalAssign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")), - LocalAssign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")), + locAssign1, + locAssign2, MemoryAssign(mem, MemoryStore(mem, R7, R1, BigEndian, 64), Some("00003")), MemoryAssign(mem, MemoryStore(mem, R6, R2, BigEndian, 64), Some("00004")), ret @@ -26,21 +61,59 @@ class LocalTest extends AnyFunSuite, TestUtil { program = returnUnifier.visitProgram(program) val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Map.empty, Specification(Set(), Map(), List(), List(), List(), Set()), program)) + val dsg: DSG = results.dsg.get + assert(dsg.formals(R1).equals(dsg.formals(R2))) + 
assert(dsg.varToCell(locAssign1)(R6)._1.equals(dsg.varToCell(locAssign2)(R7)._1)) + assert(dsg.varToCell(locAssign1)(R6)._2 == 0) + assert(dsg.varToCell(locAssign2)(R7)._2 == 1) + assert(dsg.pointTo.contains(dsg.varToCell(locAssign1)(R6)._1)) + assert(dsg.pointTo(dsg.varToCell(locAssign1)(R6)._1).equals(dsg.formals(R1)._1)) + assert(dsg.pointTo.size == 1) + + } + + test("offsetting from middle of cell to a new cell") { + val mem = Memory("mem", 10000, 10000) + val locAssign1 = LocalAssign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) + val locAssign2 = LocalAssign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) + val locAssign3 = LocalAssign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(8, 64)), Some("00005")) + + var program = prog( + proc("main", + block("operations", + locAssign1, + locAssign2, + MemoryAssign(mem, MemoryStore(mem, R7, R1, BigEndian, 64), Some("00003")), + MemoryAssign(mem, MemoryStore(mem, R6, R2, BigEndian, 64), Some("00004")), + locAssign3, + ret + ) + ) + ) + val returnUnifier = ConvertToSingleProcedureReturn() + program = returnUnifier.visitProgram(program) + val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Map.empty, Specification(Set(), Map(), List(), List(), List(), Set()), program)) + val dsg: DSG = results.dsg.get + assert(dsg.varToCell(locAssign3)(R5)._1.offset == 13) } - test("offseting from middle of cell") { + test("offsetting from middle of cell to the same cell") { val mem = Memory("mem", 10000, 10000) + val locAssign1 = LocalAssign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) + val locAssign2 = LocalAssign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) + val locAssign3 = LocalAssign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(7, 64)), Some("00005")) + var program = prog( proc("main", block("operations", // LocalAssign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), - LocalAssign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")), - LocalAssign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")), + locAssign1, + locAssign2, MemoryAssign(mem, MemoryStore(mem, R7, R1, BigEndian, 64), Some("00003")), MemoryAssign(mem, MemoryStore(mem, R6, R2, BigEndian, 64), Some("00004")), - LocalAssign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(16, 64)), Some("00005")), // TODO check with 8 + locAssign3, ret ) ) @@ -50,7 +123,43 @@ class LocalTest extends AnyFunSuite, TestUtil { program = returnUnifier.visitProgram(program) val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Map.empty, Specification(Set(), Map(), List(), List(), List(), Set()), program)) + val dsg: DSG = results.dsg.get + assert(dsg.formals(R1).equals(dsg.formals(R2))) + assert(dsg.varToCell(locAssign1)(R6)._1.equals(dsg.varToCell(locAssign2)(R7)._1)) + assert(dsg.varToCell(locAssign1)(R6)._1.equals(dsg.varToCell(locAssign3)(R5)._1)) + assert(dsg.varToCell(locAssign1)(R6)._2 == 0) + assert(dsg.varToCell(locAssign2)(R7)._2 == 1) + assert(dsg.varToCell(locAssign3)(R5)._2 == 8) + assert(dsg.pointTo.contains(dsg.varToCell(locAssign1)(R6)._1)) + assert(dsg.pointTo(dsg.varToCell(locAssign1)(R6)._1).equals(dsg.formals(R1)._1)) + assert(dsg.pointTo.size == 1) + } + test("internal offset transfer") { + val mem = Memory("mem", 10000, 10000) + val locAssign1 = LocalAssign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) + val locAssign2 = LocalAssign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), 
Some("00002")) + val locAssign3 = LocalAssign(R5, R7, Some("00005")) + var program = prog( + proc("main", + block("operations", + // LocalAssign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), + locAssign1, + locAssign2, + MemoryAssign(mem, MemoryStore(mem, R7, R1, BigEndian, 64), Some("00003")), + MemoryAssign(mem, MemoryStore(mem, R6, R2, BigEndian, 64), Some("00004")), + locAssign3, + ret + ) + ) + ) + + val returnUnifier = ConvertToSingleProcedureReturn() + program = returnUnifier.visitProgram(program) + + val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Map.empty, Specification(Set(), Map(), List(), List(), List(), Set()), program)) + val dsg: DSG = results.dsg.get + assert(dsg.varToCell(locAssign2)(R7).equals(dsg.varToCell(locAssign3)(R5))) } } From 2b8b5638b01168165172a3bf5dc6463ede2cfc8b Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 13 May 2024 11:33:38 +1000 Subject: [PATCH 009/104] added call sites --- src/main/scala/analysis/DSA.scala | 26 ++ src/main/scala/analysis/DSAGraph.scala | 314 ---------------- src/main/scala/analysis/DSAUtility.scala | 183 +++++++--- src/main/scala/analysis/Local.scala | 96 ++--- src/main/scala/analysis/LocalDSA.scala | 131 ------- .../scala/analysis/PointerTypeAnalysis.scala | 83 ----- src/main/scala/analysis/PrePass.scala | 184 ---------- src/main/scala/analysis/RegionBuilder.scala | 336 ------------------ .../analysis/SymbolicAccessAnalysis.scala | 35 +- src/main/scala/util/RunUtils.scala | 3 +- src/test/scala/LocalTest.scala | 10 +- 11 files changed, 199 insertions(+), 1202 deletions(-) create mode 100644 src/main/scala/analysis/DSA.scala delete mode 100644 src/main/scala/analysis/DSAGraph.scala delete mode 100644 src/main/scala/analysis/LocalDSA.scala delete mode 100644 src/main/scala/analysis/PointerTypeAnalysis.scala delete mode 100644 src/main/scala/analysis/PrePass.scala delete mode 100644 src/main/scala/analysis/RegionBuilder.scala diff --git a/src/main/scala/analysis/DSA.scala b/src/main/scala/analysis/DSA.scala new file mode 100644 index 000000000..6a967badd --- /dev/null +++ b/src/main/scala/analysis/DSA.scala @@ -0,0 +1,26 @@ +package analysis + +import ir.{BitVecLiteral, CFGPosition, Procedure, Program, Register, Variable} +import specification.{ExternalFunction, SpecGlobal} + +import scala.collection.mutable + +class DSA(program: Program, + symResults: Map[CFGPosition, Map[SymbolicAccess, TwoElement]], + constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + globals: Set[SpecGlobal], globalOffsets: Map[BigInt, BigInt], + externalFunctions: Set[ExternalFunction], + reachingDefs: Map[CFGPosition, Map[Variable, Set[CFGPosition]]], + writesTo: Map[Procedure, Set[Register]], + params: Map[Procedure, Set[Variable]] + ) extends Analysis[Any] { + + val DSGs : mutable.Map[Procedure, DSG] = mutable.Map() + override def analyze(): Any = { + program.procedures.foreach( + proc => + val dsg = Local(proc, symResults, constProp, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params).analyze() + DSGs.update(proc, dsg) + ) + } +} diff --git a/src/main/scala/analysis/DSAGraph.scala b/src/main/scala/analysis/DSAGraph.scala deleted file mode 100644 index 522e04881..000000000 --- a/src/main/scala/analysis/DSAGraph.scala +++ /dev/null @@ -1,314 +0,0 @@ -//package analysis -// -//import analysis.Node.getNextId -//import com.sun.org.apache.xalan.internal.xsltc.compiler.util.NodeType -//import ir.{Expr, Procedure, Register, Variable} -// 
-//import scala.collection.mutable -// -//// need a type procedure -// -////type Pointer = Cell | Variable -// -///** -// * DSA Graph -// */ -//class Graph(val procedure: Procedure) { -// -// val nodes: mutable.Set[Node] = mutable.Set() -// val pointersToCells: mutable.Map[Variable, Cell] = mutable.Map() -// // TODO refactor the one below -// // If cells change i don't think this will work. -// var pointsToRelations: mutable.Map[Cell, Cell] = mutable.Map() -// -// -// -// /** -// * -// * @param node -// * @return Set[(node, offset_i)_pointer, cell_pointee) -// */ -// def getPointees(node: Node): Set[(Cell, Cell)] = { -// pointsToRelations.foldLeft(Set(): Set[(Cell, Cell)]) { -// (s, m) => -// m match -// case (key, value) => -// if node.cells.keys.toSet.contains(key) then s.+((key, value)) else s -// } -// } -// -// def getPointers(node: Node): Set[(Cell, Cell)] = { -// pointsToRelations.foldLeft(Set(): Set[(Cell, Cell)]) { -// (s, m) => -// m match -// case (key, value) => -// if node.cells.values.toSet.contains(value) then s.+((key, value)) else s -// } -// } -// -// def pointTo(pointer: Cell, pointee: Option[Cell]): Unit = { -//// pointer.pointTo(pointee) -// pointee match -// case Some(value) => -// pointsToRelations.put(pointer, value) -// case None => pointsToRelations.remove(pointer) -// } -// -// -// def makeNode(memoryRegion2: Option[MemoryRegion2] = None): Node = { -// val node = Node(memoryRegion2, this) -// nodes.add(node) -// node -// } -// -// def makeCell(memoryRegion2: Option[MemoryRegion2] = None): Cell = { -// val node = makeNode(memoryRegion2) -// node.cell() -// } -// -// def unify(variable: Variable, cell: Cell): Unit = { -//// if !pointersToCells.contains(variable) then -//// pointersToCells.put(variable, cell) -//// else -//// pointersToCells(variable).unify(cell) -// getVariablePointee(variable).unify(cell) -// } -// -// def validateGraph(): Unit = { -// pointersToCells.values.toSet.union(pointsToRelations.keys.toSet.union(pointsToRelations.values.toSet)).foreach(validateCell) -// } -// -// def validateCell(cell: Cell): Unit = { -// assert(cell.node.isDefined) -// val node = cell.node.get -// println(cell) -// assert(nodes.contains(node)) -// assert(node.cells.contains(cell.offset)) -// assert(node.cells(cell.offset).equals(cell)) -// } -// -// def collapsePointer(pointer: Variable): Unit = { -// val cell = makeCell() -// cell.node.get.collapseNode() -// unify(pointer, cell) -// } -// -// def getVariablePointee(v: Variable): Cell = { -// pointersToCells.getOrElseUpdate(v, makeCell()) -// } -// -// def getCellPointee(c: Cell): Cell = { -// pointsToRelations.getOrElseUpdate(c, makeCell()) -// } -//} -// -//object Node { -// private var idCounter : Int = 0; -// private def getNextId: Int = { -// idCounter += 1 -// idCounter -// } -//} -// -// -///** -// * DSA Node represents a memory object -// */ -//class Node (var memoryRegion2: Option[MemoryRegion2], val owner: Graph) { -// val id = getNextId -// var cells: mutable.Map[BigInt, Cell] = mutable.Map() -// -//// var cells: mutable.Set[Cell] = mutable.Set() -// private val flags: NodeFlags = NodeFlags() -// var size: BigInt = memoryRegion2 match -// case Some(value) => // TODO get sizes of data regions and stack regions -// value match -// case DataRegion2(regionIdentifier, start) => 8 -// case HeapRegion2(regionIdentifier, proc, size) => size.value -// case StackRegion2(regionIdentifier, proc, size) => size.value -// case _ => 8 -// case None => 8 -// -// -// override def toString: String = s"Node($id, $memoryRegion2, 
$size)" -// -// override def equals(obj: Any): Boolean = -// obj match -// case n: Node => id == n.id -// case _ => false -// -// -// def links: IterableOnce[BigInt] = -// cells.keys -// -// def offsetHelper(offset1: BigInt, offset2: BigInt): BigInt = { -// if isCollapsed then -// 0 -// else if isSeq then -// (offset1 + offset2) % size -// else -// offset1 + offset2 -// } -// -// def redirectEdges(node: Node, offset: BigInt): Unit = { -// owner.getPointers(this).foreach( -// (pointer, pointee) => -// val newCell = node.cell(node.offsetHelper(offset, pointee.offset)) -// owner.pointTo(pointer, Some(newCell)) -// owner.pointersToCells.foreach( -// (key, value) => -// if value.equals(pointee) then owner.pointersToCells.put(key, newCell) -// ) -// ) -// -// owner.getPointees(this).foreach( -// (pointer, pointee) => -// val newCell = node.cell(node.offsetHelper(offset, pointer.offset)) -// if owner.pointsToRelations.contains(newCell) then -// pointee.unify(owner.pointsToRelations(newCell)) -// else -// owner.pointTo(newCell, Some(pointee)) -// ) -// -// owner.nodes.remove(this) -// owner.pointsToRelations = owner.pointsToRelations.filter( -// (key, value) => !(key.equals(this) && value.equals(this)) -// ) -// -// -// } -// def collapseNode(): Unit = { -// val cell = owner.makeCell(None) -// cells.foreach( -// (offset, c) => -// cell.unify(owner.getCellPointee(c)) -// owner.pointTo(c, None) -// ) -// size = 1 -// flags.collapsed = true -// owner.pointTo(this.cell(), Some(cell)) -// } -// -// def collapse(node: Node, offset: BigInt): Unit = { -// node.collapseNode() -// redirectEdges(node, offset) -// } -// -// def unify(node: Node, offset: BigInt = 0): Unit = { -//// owner.validateGraph() -// println(node) -// println(this) -// this.memoryRegion2 = node.memoryRegion2 -// val updatedOffset = offsetHelper(offset, 0) -// if (isCollapsed && !node.isCollapsed) { -// return collapse(node, updatedOffset) -// } else if (!isCollapsed && !node.isCollapsed) { -// if (isSeq && !node.isSeq) { -// if updatedOffset == 0 then node.unify(this) else return collapse(node, updatedOffset) -// } else if (!isSeq && node.isSeq) { -// if size % node.size == 0 then -// flags.seq = true -// return unify(node, offset) -// else if size + updatedOffset > node.size then -// return collapse(node, updatedOffset) -// } else if (isSeq && node.isSeq) { -// if size < node.size then return node.unify(this, 0) -// else if node.size % size != 0 || offsetHelper(offset, 0) > 0 then return collapse(node, updatedOffset) -// } -// } -// -// if this.equals(node) && updatedOffset > 0 then return node.collapseNode() -// redirectEdges(node, updatedOffset) -//// owner.validateGraph() -// } -// -// -// def cell(offset: BigInt = 0): Cell = { -// cells.getOrElseUpdate(offset, -// makeCell(offset) -// ) -// } -// -// -// private def makeCell(offset: BigInt = 0): Cell = { -// val cell = Cell(Some(this), offset) -// cells.update(offset, cell) -// cell -// } -// -// def updateSize(s: BigInt): Unit = { -// if isSeq && size != s then -// collapseNode() -// else if (!isSeq && s > size) then -// size = s -// } -// -// -// def isCollapsed = flags.collapsed -// def isSeq = flags.seq -// -// def setSeq(value: Boolean = true): Unit = { -// flags.seq = value -// } -// -//} -// -///** -// * Node flags -// */ -//class NodeFlags { -// var collapsed = false -// var seq = false -// def join(n: NodeFlags): Unit = { -// -// } -//} -// -///** -// * A memory cell (or a field). An offset into a memory object. 
-// */ -//class Cell(var node: Option[Node] = None, val offset: BigInt = 0) { -// -//// private var pointsTo: Option[Cell] = None -// private def n = node.get -// -// override def toString: String = s"Cell($node, $offset)" -// -//// def this(cell: Cell) = { -//// this(cell.node, cell.offset) -////// pointsTo = cell.pointsTo -//// } -//// -//// def this(cell: Cell, offset: BigInt) = { -//// this(cell.node, cell.offset + offset) -////// pointsTo = cell.pointsTo -//// } -//// -//// def this(node: Node, offset : BigInt) = { -//// this(Some(node), offset) -//// } -// -// -// override def equals(obj: Any): Boolean = { -// obj match -// case cell: Cell => cell.node.equals(this.node) && cell.offset == this.offset -// case _ => false -// } -// -// def unify(cell: Cell): Unit = { -// if (offset < cell.offset) then -// n.unify(cell.n, cell.offset - offset) -// else if (cell.offset < offset) then -// cell.n.unify(n, offset-cell.offset) -// else -// n.unify(cell.n) -// } -// -//} -// -///** -// * Simulation relation mapping -// */ -//class SimulationMap { -// -//} diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index 88459cde0..a5ad32e0c 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -24,11 +24,14 @@ class DSG(val proc: Procedure, varToSym: Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]], globals: Set[SpecGlobal], globalOffsets: Map[BigInt, BigInt], externalFunctions: Set[ExternalFunction], - reachingDefs: Map[CFGPosition, Map[Variable, Set[CFGPosition]]], - writesTo: Map[Procedure, Set[Register]]) { + val reachingDefs: Map[CFGPosition, Map[Variable, Set[CFGPosition]]], + val writesTo: Map[Procedure, Set[Register]], + val params: Map[Procedure, Set[Variable]] + ) { // DSNodes owned by this graph val nodes: mutable.Set[DSN] = mutable.Set() val pointTo: mutable.Map[DSC, DSC] = mutable.Map() + val callsites: mutable.Set[CallSite] = mutable.Set() val mallocRegister = Register("R0", BitVecType(64)) val stackPointer = Register("R31", BitVecType(64)) @@ -54,7 +57,9 @@ class DSG(val proc: Procedure, assert(!m(offset).cells(0).growSize(byteSize)) m else - val node = DSN(Some(this), Some(StackRegion2(pos.toShortString, proc, byteSize))) + val node = DSN(Some(this), byteSize) + node.allocationRegions.add(StackRegion2(pos.toShortString, proc, byteSize)) + node.flags.stack = true node.addCell(0, byteSize) m + (offset -> node) case _ => m @@ -68,7 +73,9 @@ class DSG(val proc: Procedure, assert(!m(offset).cells(0).growSize(byteSize)) m else - val node = DSN(Some(this), Some(StackRegion2(pos.toShortString, proc, byteSize))) + val node = DSN(Some(this), byteSize) + node.allocationRegions.add(StackRegion2(pos.toShortString, proc, byteSize)) + node.flags.stack = true node.addCell(0, byteSize) m + (offset -> node) case _ => m @@ -95,23 +102,32 @@ class DSG(val proc: Procedure, var address: BigInt = global.address if swappedOffsets.contains(address) then address = swappedOffsets(address) - m + ((address, address + global.size/8) -> (DSN(Some(this), Some(DataRegion2(global.name, address, global.size))), 0)) + val node = DSN(Some(this), global.size) + node.allocationRegions.add(DataRegion2(global.name, address, global.size)) + node.flags.global = true + node.flags.incomplete = true + m + ((address, address + global.size/8) -> (node, 0)) + } externalFunctions.foreach( external => var address: BigInt = external.offset if swappedOffsets.contains(address) then address = swappedOffsets(address) - 
globalMapping.update((address, address), (DSN(Some(this), Some(DataRegion2(external.name, address, 0))), 0)) + val node = DSN(Some(this)) + node.allocationRegions.add(DataRegion2(external.name, address, 0)) + node.flags.global = true + node.flags.incomplete = true + globalMapping.update((address, address), (node, 0)) ) // determine if an address is a global and return the corresponding global if it is. - def isGlobal(address: BigInt): Option[(DSN, BigInt)] = + def isGlobal(address: BigInt): Option[((BigInt, BigInt), (DSN, BigInt))] = for (elem <- globalMapping) { val range = elem._1 if address >= range._1 && address <= range._2 then - return Some(elem._2) + return Some(elem) } None @@ -164,7 +180,7 @@ class DSG(val proc: Procedure, def getPointee(cell: DSC): DSC = if !pointTo.contains(cell) then - val node = DSN(Some(this), None) + val node = DSN(Some(this)) pointTo.update(cell, node.cells(0)) pointTo(cell) @@ -197,7 +213,7 @@ class DSG(val proc: Procedure, } ) - node.collapsed = true + node.flags.collapsed = true node.cells.clear() @@ -252,8 +268,7 @@ class DSG(val proc: Procedure, collapseNode(node1) collapseNode(node2) node2.allocationRegions.addAll(node1.allocationRegions) - if node2.region.isEmpty then - node2.region = node1.region + node2.flags.join(node1.flags) if pointTo.contains(node1.cells(0)) then if pointTo.contains(node2.cells(0)) then pointTo.update(node2.cells(0), mergeCells(getPointee(node1.cells(0)), getPointee(node2.cells(0)))) @@ -288,18 +303,16 @@ class DSG(val proc: Procedure, var lastOffset: BigInt = -1 var lastAccess: BigInt = -1 - val resultNode = DSN(Some(this), node1.region) + val resultNode = DSN(Some(this)) resultNode.allocationRegions.addAll(node1.allocationRegions ++ node2.allocationRegions) - if node1.region.isDefined then - resultNode.region = node1.region - else if node2.region.isDefined then - resultNode.region = node2.region - if node2.region.get.isInstanceOf[DataRegion2] then - globalMapping.foreach{ - case ((start: BigInt, end: BigInt), (node:DSN, offset: BigInt)) => - if node.equals(node2) then - globalMapping.update((start, end), (node, offset + delta)) - } + resultNode.flags.join(node1.flags) + resultNode.flags.join(node2.flags) + if node2.flags.global then + globalMapping.foreach{ + case ((start: BigInt, end: BigInt), (node:DSN, offset: BigInt)) => + if node.equals(node2) then + globalMapping.update((start, end), (node, offset + delta)) + } val resultCells: mutable.Map[BigInt, (Set[DSC], BigInt)] = mutable.Map() cells.foreach { case (offset: BigInt, cell: DSC) => @@ -351,13 +364,8 @@ class DSG(val proc: Procedure, private def isFormal(pos: CFGPosition, variable: Variable): Boolean = - variable != stackPointer && !reachingDefs(pos).contains(variable) + !reachingDefs(pos).contains(variable) - def unwrapPaddingAndSlicing(expr: Expr): Expr = - expr match - case Extract(end, start, body) if start == 0 && end == 32 => unwrapPaddingAndSlicing(body) - case ZeroExtend(extension, body) => unwrapPaddingAndSlicing(body) - case _ => expr val formals: mutable.Map[Variable, (DSC, BigInt)] = mutable.Map() val varToCell: Map[CFGPosition, mutable.Map[Variable, (DSC, BigInt)]] = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).foldLeft(Map[CFGPosition, mutable.Map[Variable, (DSC, BigInt)]]()) { @@ -367,22 +375,23 @@ class DSG(val proc: Procedure, value.variables.foreach( v => if isFormal(pos, v) then - val node = DSN(Some(this), None) + val node = DSN(Some(this)) + node.flags.incomplete = true node.rep = "formal" nodes.add(node) 
formals.update(v, (node.cells(0), 0)) ) - val node = DSN(Some(this), None) + val node = DSN(Some(this)) node.rep = "ssa" m + (pos -> mutable.Map(variable -> (node.cells(0), 0))) case DirectCall(proc, target, label) if proc.name == "malloc" => - val node = DSN(Some(this), None) + val node = DSN(Some(this)) node.rep = "ssa" m + (pos -> mutable.Map(mallocRegister -> (node.cells(0), 0))) case DirectCall(proc, target, label) if writesTo.contains(proc) => val result: Map[Variable, (DSC, BigInt)] = writesTo(proc).foldLeft(Map[Variable, (DSC, BigInt)]()){ (n, variable) => - val node = DSN(Some(this), None) + val node = DSN(Some(this)) node.rep = "ssa" n + (variable -> (node.cells(0), 0)) } @@ -390,7 +399,8 @@ class DSG(val proc: Procedure, case MemoryAssign(memory, MemoryStore(mem, index, expr: Expr, endian, size), label) if unwrapPaddingAndSlicing(expr).isInstanceOf[Variable] => val value: Variable = unwrapPaddingAndSlicing(expr).asInstanceOf[Variable] if isFormal(pos, value) then - val node = DSN(Some(this), None) + val node = DSN(Some(this)) + node.flags.incomplete = true node.rep = "formal" nodes.add(node) formals.update(value, (node.cells(0), 0)) @@ -400,23 +410,46 @@ class DSG(val proc: Procedure, } -class DSN(val graph: Option[DSG], var region: Option[MemoryRegion2], val id: Int = NodeCounter.getCounter) { - +class Flags() { var collapsed = false + var stack = false + var heap = false + var global = false + var unknown = false + var read = false + var modified = false + var incomplete = false + var foreign = false + + def join(other: Flags): Unit = + collapsed = collapsed || other.collapsed + stack = other.stack || stack + heap = other.heap || heap + global = other.global || global + unknown =other.unknown || unknown + read = other.read || read + modified = other.modified || modified + incomplete = other.incomplete || incomplete + foreign = other.foreign && foreign +} - val allocationRegions: mutable.Set[MemoryRegion2] = region match - case Some(value) => mutable.Set(value) - case None => mutable.Set() +class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCounter.getCounter) { + +// var collapsed = false + var flags = Flags() + def collapsed = flags.collapsed + + val allocationRegions: mutable.Set[MemoryRegion2] = mutable.Set() var rep: String = "" - var size: BigInt = region match - case Some(value) => value match - case DataRegion2(regionIdentifier, start, size) => size - case HeapRegion2(regionIdentifier, proc, size) => size - case StackRegion2(regionIdentifier, proc, size) => size - case UnknownRegion2(regionIdentifier, proc) => 0 - case None => 0 +// var size: BigInt = region match +// case Some(value) => value match +// case DataRegion2(regionIdentifier, start, size) => size +// case HeapRegion2(regionIdentifier, proc, size) => size +// case StackRegion2(regionIdentifier, proc, size) => size +// case UnknownRegion2(regionIdentifier, proc) => 0 +// case None => 0 val cells: mutable.Map[BigInt, DSC] = mutable.Map() this.addCell(0, 0) @@ -449,13 +482,6 @@ class DSN(val graph: Option[DSG], var region: Option[MemoryRegion2], val id: Int if collapsed then cells(0) else if !cells.contains(offset) then -// cells.foreach{ -// case (start:BigInt, cell:DSC) => -// if start < offset && offset < (start + cell.largestAccessedSize) then -// val internalOffset = offset - start -// cell.growSize(internalOffset + size) -// return (cell, internalOffset) -// } val cell = DSC(Some(this), offset) cells.update(offset, cell) cell.growSize(size) @@ -480,7 +506,6 @@ case class DSC(node: 
Option[DSN], offset: BigInt) { var largestAccessedSize: BigInt = 0 - def growSize(size: BigInt): Boolean = if size > largestAccessedSize then largestAccessedSize = size @@ -490,5 +515,55 @@ case class DSC(node: Option[DSN], offset: BigInt) override def toString: String = s"Cell(${if node.isDefined then node.get.toString else "NONE"}, $offset)" } +class CallSite(val call: DirectCall, val graph: DSG) { + val proc = call.target + val paramCells: Map[Variable, DSC] = graph.params(proc).foldLeft(Map[Variable, DSC]()) { + (m, reg) => + val node = DSN(Some(graph)) + node.flags.incomplete = true + m + (reg -> node.cells(0)) + } + val returnCells: Map[Variable, DSC] = graph.writesTo(proc).foldLeft(Map[Variable, DSC]()) { + (m, reg) => + val node = DSN(Some(graph)) + node.flags.incomplete = true + m + (reg -> node.cells(0)) + } +} + +def unwrapPaddingAndSlicing(expr: Expr): Expr = + expr match + case Extract(end, start, body) if start == 0 && end == 32 => unwrapPaddingAndSlicing(body) + case ZeroExtend(extension, body) => unwrapPaddingAndSlicing(body) + case _ => expr + +def decToBinary(n: BigInt): Array[Int] = { + val binaryNum: Array[Int] = new Array[Int](64) + var i = 0 + var num = n + while (num > 0) { + binaryNum(i) = (num % BigInt(2)).intValue + num = num / 2 + i += 1 + } + binaryNum +} + +def twosComplementToDec(binary: Array[Int]): BigInt = { + var result: BigInt = BigInt(0) + var counter: Int = 0 + binary.foreach( + n => + if counter == binary.length - 1 && n == 1 then + result = result - BigInt(2).pow(counter) + else if n == 1 then + result = result + BigInt(2).pow(counter) + counter += 1 + ) + result +} + +val BITVECNEGATIVE: BigInt = new BigInt(new BigInteger("9223372036854775808")) + diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala index 9bc44acbf..708dc5f55 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/Local.scala @@ -1,6 +1,6 @@ package analysis -import ir.{BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, IntraProcIRCursor, Literal, LocalAssign, Memory, MemoryAssign, MemoryLoad, MemoryStore, Procedure, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, computeDomain, toShortString} +import ir.{BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, IntraProcIRCursor, LocalAssign, MemoryAssign, MemoryLoad, MemoryStore, Procedure, Register, Variable, ZeroExtend, computeDomain, toShortString} import specification.{ExternalFunction, SpecGlobal} import scala.util.control.Breaks.{break, breakable} @@ -14,12 +14,12 @@ class Local( globals: Set[SpecGlobal], globalOffsets: Map[BigInt, BigInt], externalFunctions: Set[ExternalFunction], reachingDefs: Map[CFGPosition, Map[Variable, Set[CFGPosition]]], - writesTo: Map[Procedure, Set[Register]] + writesTo: Map[Procedure, Set[Register]], + params: Map[Procedure, Set[Variable]] ) extends Analysis[Any]{ - val bitvecnegative: BigInt = new BigInt(new BigInteger("9223372036854775808")) - val mallocRegister = Register("R0", BitVecType(64)) - val stackPointer = Register("R31", BitVecType(64)) + private val mallocRegister = Register("R0", BitVecType(64)) + private val stackPointer = Register("R31", BitVecType(64)) private val visited: mutable.Set[CFGPosition] = mutable.Set() @@ -29,7 +29,7 @@ class Local( val position = syms._1 val innerMap = syms._2.foldLeft(Map[Variable, Set[SymbolicAccess]]()) { (m, access) => - if (m.contains(access._1.accessor)) then + if m.contains(access._1.accessor) then // every 
variable pointing to a stack region ONLY has one symbolic access associated with it. m(access._1.accessor).foreach( sym => assert(!sym.symbolicBase.isInstanceOf[StackRegion2]) @@ -63,33 +63,6 @@ class Local( case _ => None - - def decToBinary(n: BigInt): Array[Int] = { - val binaryNum: Array[Int] = new Array[Int](64) - var i = 0 - var num = n - while (num > 0) { - binaryNum(i) = (num % BigInt(2)).intValue - num = num / 2 - i += 1 - } - binaryNum - } - - def twosComplementToDec(binary: Array[Int]): BigInt = { - var result: BigInt = BigInt(0) - var counter: Int = 0 - binary.foreach( - n => - if counter == binary.length - 1 && n == 1 then - result = result - BigInt(2).pow(counter) - else if n == 1 then - result = result + BigInt(2).pow(counter) - counter += 1 - ) - result - } - var mallocCount: Int = 0 private def nextMallocCount = { @@ -97,14 +70,13 @@ class Local( s"malloc_$mallocCount" } - val graph = DSG(proc, constProp, varToSym, globals, globalOffsets, externalFunctions, reachingDefs, writesTo) + val graph: DSG = DSG(proc, constProp, varToSym, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params) def isGlobal(expr: Expr, pos: CFGPosition, size: Int = 0): Option[DSC] = if evaluateExpression(expr, constProp(pos)).isDefined && graph.isGlobal(evaluateExpression(expr, constProp(pos)).get.value).isDefined then val address = evaluateExpression(expr, constProp(pos)).get.value - val (node: DSN, internal: BigInt) = graph.isGlobal(evaluateExpression(expr, constProp(pos)).get.value).get - val baseAddress = node.region.get.asInstanceOf[DataRegion2].start + val ((baseAddress: BigInt, end: BigInt), (node: DSN, internal: BigInt)) = graph.isGlobal(evaluateExpression(expr, constProp(pos)).get.value).get val offset = address - baseAddress node.addCell(internal + offset, size) graph.optionalCollapse(node) @@ -124,13 +96,6 @@ class Local( else Set(graph.formals(arg)) - // this function is used to ignore slicing and padding between 32 bit and 64 bit values - // this can introduce unsoundness - def unwrapPaddingAndSlicing(expr: Expr): Expr = - expr match - case Extract(end, start, body) if start == 0 && end == 32 => unwrapPaddingAndSlicing(body) - case ZeroExtend(extension, body) => unwrapPaddingAndSlicing(body) - case _ => expr @@ -159,19 +124,14 @@ class Local( val node = cell.node.get // get the node of R_y var field = offset + cell.offset + internalOffset // calculate the total offset node.addCell(field, size) // add cell there if doesn't already exists -// graph.optionalCollapse(node) if node.collapsed then field = 0 graph.mergeCells(c, if pointee then graph.getPointee(node.getCell(field)) else node.getCell(field)) else -// if collapse then - val node = cell.node.get - graph.collapseNode(node) - graph.mergeCells(c, if pointee then graph.getPointee(node.cells(0)) else node.cells(0)) -// else -// cell.node.get.addCell(cell.offset + internalOffset, size) //update the size of the cell -//// graph.optionalCollapse(cell.node.get) -// graph.mergeCells(c, if pointee then graph.getPointee(cell.node.get.getCell(cell.offset + internalOffset)) else cell.node.get.getCell(cell.offset + internalOffset)) + val node = cell.node.get + graph.collapseNode(node) + graph.mergeCells(c, if pointee then graph.getPointee(node.cells(0)) else node.cells(0)) + } if pointee then cells.foreach( @@ -203,10 +163,22 @@ class Local( val size: BigInt = evaluateExpression(mallocRegister, constProp(n)) match case Some(value) => value.value case None => 0 - val node = DSN(Some(graph), Some(HeapRegion2(nextMallocCount, 
proc, size))) - graph.nodes.add(node) + val node = DSN(Some(graph), size) + node.allocationRegions.add(HeapRegion2(nextMallocCount, proc, size)) + node.flags.heap = true graph.mergeCells(graph.varToCell(n)(mallocRegister)._1, node.cells(0)) - + case call: DirectCall if params.contains(call.target) => + val cs = CallSite(call, graph) + graph.callsites.add(cs) + cs.paramCells.foreach{ + case (variable: Variable, cell: DSC) => + visitPointerArithmeticOperation(call, cell, variable, 0) + } + cs.returnCells.foreach{ + case (variable: Variable, cell: DSC) => + val returnArgument = graph.varToCell(n)(variable)._1 + graph.mergeCells(returnArgument, cell) + } case LocalAssign(variable, rhs, maybeString) => val expr: Expr = unwrapPaddingAndSlicing(rhs) val lhsCell = graph.varToCell(n)(variable)._1 @@ -219,10 +191,11 @@ class Local( else expr match case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) && arg1.equals(stackPointer) - && evaluateExpression(arg2, constProp(n)).isDefined && evaluateExpression(arg2, constProp(n)).get.value >= bitvecnegative => + && evaluateExpression(arg2, constProp(n)).isDefined && evaluateExpression(arg2, constProp(n)).get.value >= BITVECNEGATIVE => val size = twosComplementToDec(decToBinary(evaluateExpression(arg2, constProp(n)).get.value)) - val node = DSN(Some(graph), Some(StackRegion2("Stack_"+proc.name, proc, -size))) - graph.nodes.add(node) + val node = DSN(Some(graph)) + node.allocationRegions.add(StackRegion2("Stack_"+proc.name, proc, -size)) + node.flags.stack = true graph.mergeCells(lhsCell, node.cells(0)) case BinaryExpr(op, arg1: Variable, arg2) if /*varToSym.contains(n) && varToSym(n).contains(arg1) && */ evaluateExpression(arg2, constProp(n)).isDefined => @@ -234,6 +207,7 @@ class Local( case MemoryLoad(mem, index, endian, size) => val byteSize = (size.toDouble/8).ceil.toInt + lhsCell.node.get.flags.read = true if isGlobal(index, n, byteSize).isDefined then val global = isGlobal(index, n, byteSize).get graph.mergeCells(lhsCell, graph.getPointee(global)) @@ -273,6 +247,7 @@ class Local( } } val node = cell.node.get + node.flags.unknown = true graph.collapseNode(node) case MemoryAssign(memory, MemoryStore(mem, index, expr: Expr, endian, size), label) if unwrapPaddingAndSlicing(expr).isInstanceOf[Variable] => // if value is a literal ignore it @@ -288,12 +263,13 @@ class Local( case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => // assert(varToSym(n).contains(arg1)) val offset = evaluateExpression(arg2, constProp(n)).get.value - visitPointerArithmeticOperation(n, DSN(Some(graph), None).cells(0), arg1, byteSize, true, offset) + visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg1, byteSize, true, offset) case arg: Variable => // assert(varToSym(n).contains(arg)) - visitPointerArithmeticOperation(n, DSN(Some(graph), None).cells(0), arg, byteSize, true) + visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg, byteSize, true) case _ => ??? 
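+            // a store through this pointer marks the pointee's node as modified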
+ addressPointee.node.get.flags.modified = true val valueCells = getCells(n, value) val result = valueCells.foldLeft(addressPointee) { (c, p) => diff --git a/src/main/scala/analysis/LocalDSA.scala b/src/main/scala/analysis/LocalDSA.scala deleted file mode 100644 index 22ee1e48f..000000000 --- a/src/main/scala/analysis/LocalDSA.scala +++ /dev/null @@ -1,131 +0,0 @@ -//package analysis -// -//import ir.{Assert, Assume, BVADD, BinOp, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, Call, DirectCall, Expr, Extract, GoTo, IndirectCall, IntraProcIRCursor, Literal, LocalAssign, LocalVar, Memory, MemoryAssign, MemoryLoad, MemoryStore, NOP, Procedure, Program, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, computeDomain} -// -//import java.math.BigInteger -//import scala.annotation.{static, tailrec} -// -// -//class LocalDSA(val program: Program, val procedure: Procedure, constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], var symbolicAccesses: Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]]) extends Analysis[Any] { -// val graph: Graph = Graph(procedure) -// -// -// private val stackPointer = Register("R31", BitVecType(64)) -// private val linkRegister = Register("R30", BitVecType(64)) -// private val framePointer = Register("R29", BitVecType(64)) -// -// val bitvecnegative: BigInt = new BigInt(new BigInteger("9223372036854775808")) -// -// -// private val ignoreRegions: Set[Expr] = Set(linkRegister, framePointer) -// -// val malloc_register = Register("R0", BitVecType(64)) -// private var localVarCount: Int = -1 -// private def getNextLocalVarName: String = { -// localVarCount += 1 -// s"NormVar_$localVarCount" -// } -// -//// for (i <- 0 to 31) { -//// graph.pointersToCells.update(Register(s"R$i", BitVecType(64)), graph.makeCell()) -//// } -// -// def visitBinaryLocalAssign(lhs: Variable, op: BinOp, arg1: Variable, offset: BigInt) = { -// val cell = graph.getVariablePointee(arg1) -// val node = cell.node.get -// if node.isCollapsed then -// graph.collapsePointer(lhs) // TODO ensure passing right memory region here -// else if !node.isSeq /* && offset == 0 */ then // TODO here we are making everything with a offset a sequence -// val size = cell.offset + offset + 8 // assuming bitvector of 64, all the fields that matter are pointers -// node.updateSize(size) -// graph.unify(lhs, node.cell(cell.offset + offset)) -// else -// node.setSeq() -// val size = node.size.gcd(cell.offset) -// node.updateSize(size) -// graph.unify(lhs, cell) -// } -// -// def atomicPointer(n: CFGPosition) : Unit = { -// n match -// case DirectCall(target: Procedure, returnTarget, label) if procedure.name.equals("malloc") => -// val cell = graph.makeCell(Some(symbolicAccesses(n)(malloc_register).head.symbolicBase)) -// graph.unify(malloc_register, cell) -// // case _ => // TODO ignoring all other calls right now. 
Think about semantics of a call -// // should unify returns -// case LocalAssign(variable, expr, maybeString) => -// expr match -// case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) && arg1.equals(stackPointer) -// && evaluateExpression(arg2, constantProp(n)).isDefined && evaluateExpression(arg2, constantProp(n)).get.value >= bitvecnegative => -// // p = &x -// val node = graph.makeNode(Some(symbolicAccesses(n)(variable).head.symbolicBase)) -// val cell = node.cell() -// graph.unify(variable, cell) -//// case BinaryExpr(op, arg1: Variable, arg2) if symbolicAccesses(n).contains(arg1) && evaluateExpression(arg2, constantProp(n)).isDefined => // what TODO if can't evaluate arg2 -//// // variable = arg1 + (c = 0 * m) + arg2 -//// val offset: BigInt = evaluateExpression(arg2, constantProp(n)).get.value -//// visitBinaryLocalAssign(variable, op, arg1, offset) -//// case vari: Variable if symbolicAccesses(n).contains(vari) => // TODO actually check if q is a pointer -//// // p = q -//// val cell = graph.getVariablePointee(vari) -//// val node = cell.node.get -//// if node.isCollapsed then -//// graph.collapsePointer(variable) // TODO ensure passing right memory region here -//// else if !node.isSeq then -//// val size = cell.offset + 8 // assume all sizes are the same for now since we don't know sizes of everything -//// node.updateSize(size) -//// graph.unify(variable, cell) -//// else -//// node.setSeq() -//// graph.unify(variable, cell) // c is zero here -// case MemoryLoad(mem, index, endian, size) => -// // q = *p -// index match -// case BinaryExpr(op, arg1: Variable, arg2) if symbolicAccesses(n).contains(arg1) && evaluateExpression(arg2, constantProp(n)).isDefined => -// val local = symbolicAccesses(n).keySet.reduce( -// (v1, v2) => -// if v1.name.startsWith("NormVar") then -// v1 -// else if v2.name.startsWith("NormVar") then -// v2 -// else -// v1 -// ) -// assert(local.name.startsWith("NormVar")) -// visitBinaryLocalAssign(local, op, arg1, evaluateExpression(arg2, constantProp(n)).get.value) -// graph.getCellPointee(graph.getVariablePointee(local)).unify(graph.getVariablePointee(variable)) -// case vari: Variable if symbolicAccesses(n).contains(vari) => -// graph.getCellPointee(graph.getVariablePointee(vari)).unify(graph.getVariablePointee(variable)) -// case _ => -// case _ => -// case MemoryAssign(me, MemoryStore(mem, index, value, endian, size), label) => -// //*p = q -// index match -// case BinaryExpr(op, arg1: Variable, arg2) if symbolicAccesses(n).contains(arg1) => -// val local = symbolicAccesses(n).keySet.reduce( -// (v1, v2) => -// if v1.name.startsWith("NormVar") then -// v1 -// else if v2.name.startsWith("NormVar") then -// v2 -// else -// v1 -// ) -// assert(local.name.startsWith("NormVar")) -// visitBinaryLocalAssign(local, op, arg1, evaluateExpression(arg2, constantProp(n)).get.value) -// graph.getCellPointee(graph.getVariablePointee(local)). 
-// unify(graph.getVariablePointee(value.variables.head)) -// case vari: Variable if symbolicAccesses(n).contains(vari) => -// graph.getCellPointee(graph.getVariablePointee(vari)).unify(graph.getVariablePointee(value.variables.head)) -// case _ => -// case _ => -// } -// -// def analyze(): Any = { -// computeDomain(IntraProcIRCursor, Set(procedure)).foreach(atomicPointer) -// -//// println(graph.pointersToCells) -//// println(graph.pointsToRelations) -//// println(graph.nodes) -// } -//} diff --git a/src/main/scala/analysis/PointerTypeAnalysis.scala b/src/main/scala/analysis/PointerTypeAnalysis.scala deleted file mode 100644 index e8aed8504..000000000 --- a/src/main/scala/analysis/PointerTypeAnalysis.scala +++ /dev/null @@ -1,83 +0,0 @@ -package analysis - -import analysis.solvers.{BackwardIDESolver, ForwardIDESolver} -import ir.{BinaryExpr, BitVecType, CFGPosition, DirectCall, Extract, GoTo, IRWalk, IndirectCall, Literal, LocalAssign, Memory, MemoryAssign, MemoryLoad, MemoryStore, Procedure, Program, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, end} - -import scala.collection.immutable.Map -import scala.collection.mutable - -trait PointerTypeFunctions extends BackwardIDEAnalysis[Variable, TwoElement, TwoElementLattice] { - - val valuelattice: TwoElementLattice = TwoElementLattice() - val edgelattice: EdgeFunctionLattice[TwoElement, TwoElementLattice] = EdgeFunctionLattice(valuelattice) - import edgelattice.{IdEdge, ConstEdge} - - def edgesOther(n: CFGPosition)(d: DL): Map[DL, EdgeFunction[TwoElement]] = { - n match - case s: CFGPosition if end(program.mainProcedure).equals(s) => - d match - case Left(value) => Map(d -> IdEdge()) - case Right(_) => Map(d -> IdEdge(), Left(Register("R31", BitVecType(64))) -> ConstEdge(TwoElementTop)) - - case LocalAssign(lhs, expr, _) => - expr match - case BinaryExpr(op, arg1, arg2) => - d match - case Left(value) if value == arg1 => Map(Left(lhs) -> IdEdge()) - case Left(value) if value == lhs => Map() - case _ => Map(d -> IdEdge()) - case rhs: Variable => - d match - case Left(value) if value == rhs => Map(Left(lhs) -> IdEdge()) - case Left(value) if value == lhs => Map() - case _ => Map(d -> IdEdge()) - case MemoryLoad(mem, index, endian, size) => - index match - case BinaryExpr(op, arg1: Variable, arg2) => - d match - case Left(value) if value == lhs || value == arg1 => Map() - case Left(value) => Map(d -> IdEdge()) - case Right(_) => Map(d -> IdEdge(), Left(arg1) -> ConstEdge(TwoElementTop)) - case variable: Variable => - d match - case Left(value) if value == lhs || value == variable => Map() - case Left(value) => Map(d -> IdEdge()) - case Right(_) => Map(d -> IdEdge(), Left(variable) -> ConstEdge(TwoElementTop)) - case _ => - d match - case Left(value) if value == lhs => Map() - case Left(value) => Map(d -> IdEdge()) - case Right(_) => Map(d -> IdEdge()) - case _ => - d match - case Left(value) if value == lhs => Map() - case Left(value) => Map(d -> IdEdge()) - case Right(_) => Map(d -> IdEdge()) - case MemoryAssign(mem, MemoryStore(mem2, index, value, endian, size), _) => - index match - case BinaryExpr(op, arg1: Variable, arg2) => - d match - case Left(value) if value != arg1 => Map(d -> IdEdge()) - case Left(value) if value == arg1 => Map() - case Right (_) => Map(d -> IdEdge(), Left(arg1) -> ConstEdge(TwoElementTop)) - case variable: Variable => - d match - case Left(value) if value != variable => Map(d -> IdEdge()) - case Left(value) if value == variable => Map() - case Right (_) => Map(d -> IdEdge(), Left(variable) -> 
ConstEdge(TwoElementTop)) - case _ => Map(d -> IdEdge()) - case _ => Map(d -> IdEdge()) - } - - def edgesCallToEntry(call: GoTo, entry: IndirectCall)(d: DL): Map[DL, EdgeFunction[TwoElement]] = Map(d -> IdEdge()) - - def edgesExitToAfterCall(exit: Procedure, aftercall: DirectCall)(d: DL): Map[DL, EdgeFunction[TwoElement]] = Map(d -> IdEdge()) - - def edgesCallToAfterCall(call: GoTo, aftercall: DirectCall)(d: DL): Map[DL, EdgeFunction[TwoElement]] = - d match - case Left(value) => Map() // maps all variables before the call to bottom - case Right(_) => Map(d -> IdEdge()) -} - -class PointerTypeAnalysis(program: Program) - extends BackwardIDESolver[Variable, TwoElement, TwoElementLattice](program), PointerTypeFunctions diff --git a/src/main/scala/analysis/PrePass.scala b/src/main/scala/analysis/PrePass.scala deleted file mode 100644 index 2328f919b..000000000 --- a/src/main/scala/analysis/PrePass.scala +++ /dev/null @@ -1,184 +0,0 @@ -//package analysis -// -//import analysis.solvers.{SimplePushDownWorklistFixpointSolver, SimpleWorklistFixpointSolver} -//import ir.IRWalk.procedure -//import ir.{Assert, Assume, BVADD, BVAND, BVASHR, BVBinOp, BVCOMP, BVCONCAT, BVEQ, BVLSHR, BVMUL, BVNAND, BVNEQ, BVNOR, BVOR, BVSDIV, BVSGE, BVSGT, BVSHL, BVSLE, BVSLT, BVSMOD, BVSREM, BVSUB, BVUDIV, BVUGE, BVUGT, BVULE, BVULT, BVUREM, BVXNOR, BVXOR, BinOp, BinaryExpr, BitVecLiteral, BitVecType, BoolBinOp, CFGPosition, Call, DirectCall, Expr, Extract, GoTo, IntBinOp, IntraProcIRCursor, Literal, LocalAssign, LocalVar, Memory, MemoryAssign, MemoryLoad, MemoryStore, NOP, Procedure, Program, Register, Repeat, SignExtend, Statement, UnaryExpr, Variable, ZeroExtend, computeDomain} -//import specification.SpecGlobal -//import util.Logger -// -//import java.math.BigInteger -//import scala.collection.mutable -//import scala.math.BigInt -// -//case class SymbolicAccess(symbolicBase: MemoryRegion2, offset: BigInt) { -// override def toString: String = s"SymbolicAccess($symbolicBase, $offset)" -//} -// -//class PrePass(program: Program, -// constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], globals: Set[SpecGlobal], globalAddresses: Map[BigInt, String], globalOffsets: Map[BigInt, BigInt]) -// -// extends Analysis[Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]]], -// IRIntraproceduralForwardDependencies, -// SimpleWorklistFixpointSolver[CFGPosition, Map[Variable, Set[SymbolicAccess]], -// MapLattice[Variable, Set[SymbolicAccess], PowersetLattice[SymbolicAccess]]] { -// -// val domain: Set[CFGPosition] = computeDomain(IntraProcIRCursor, program.procedures).toSet -// val lattice: MapLattice[CFGPosition, Map[Variable, Set[SymbolicAccess]], -// MapLattice[Variable, Set[SymbolicAccess], PowersetLattice[SymbolicAccess]]] = MapLattice(MapLattice(PowersetLattice())) -// -// var mallocCount: Int -// = 0 -// private var stackCount: Int = 0 -// -// private def nextMallocCount = { -// mallocCount += 1 -// s"malloc_$mallocCount" -// } -// -// private def nextStackCount = { -// stackCount += 1 -// s"stack_$stackCount" -// } -// -// private var localVarCount: Int = -1 -// -// private def getNextLocalVarName: String = { -// localVarCount += 1 -// s"NormVar_$localVarCount" -// } -// -// -// private val stackPointer = Register("R31", BitVecType(64)) -// private val linkRegister = Register("R30", BitVecType(64)) -// private val framePointer = Register("R29", BitVecType(64)) -// private val mallocVariable = Register("R0", BitVecType(64)) -// -// def updateOffsets(variable: Variable, arg: Variable, offsetChange:BigInt, 
op: BinOp, s: Map[Variable, Set[SymbolicAccess]]): Map[Variable, Set[SymbolicAccess]] = { -// val newSyms: mutable.Set[SymbolicAccess] = mutable.Set() -// s(arg).foreach( -// sym => -// op match -// case BVADD => newSyms.add(SymbolicAccess(sym.symbolicBase, sym.offset + offsetChange)) -// case BVSUB => newSyms.add(SymbolicAccess(sym.symbolicBase, sym.offset - offsetChange)) -// case _ => ???// check if this happens often -// ) -// s + (variable -> newSyms.toSet) -// } -// -// -// def decToBinary(n: BigInt): Array[Int] = { -// val binaryNum: Array[Int] = new Array[Int](64) -// var i = 0 -// var num = n -// while (num > 0) { -// binaryNum(i) = (num % BigInt(2)).intValue -// num = num / 2 -// i += 1 -// } -// binaryNum -// } -// -// def twosComplementToDec(binary: Array[Int]): BigInt = { -// var result: BigInt = BigInt(0) -// var counter: Int = 0 -// binary.foreach( -// n => -// if counter == binary.length - 1 && n == 1 then -// result = result - BigInt(2).pow(counter) -// else if n == 1 then -// result = result + BigInt(2).pow(counter) -// counter += 1 -// ) -// result -// } -// -// def transfer(n: CFGPosition, symbolicAccesses: Map[Variable, Set[SymbolicAccess]]): Map[Variable, Set[SymbolicAccess]] = { -// -// val bitvecnegative: BigInt = new BigInt(new BigInteger("9223372036854775808")) //"18446744073709551615" -// -// val s = symbolicAccesses.filter((v, se) => -// !v.name.startsWith("NormVar") -// ) -// -// n match -// case LocalAssign(variable, expr, maybeString) => -// expr match -// case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) && arg1.equals(stackPointer) -// && evaluateExpression(arg2, constProp(n)).isDefined && evaluateExpression(arg2, constProp(n)).get.value >= bitvecnegative => -// val size = twosComplementToDec(decToBinary(evaluateExpression(arg2, constProp(n)).get.value)) -// s + (variable -> Set(SymbolicAccess(StackRegion2(s"Stack_${procedure(n).name}", procedure(n), BitVecLiteral(-size.intValue, 64)), 0))) -// case BinaryExpr(op, arg1: Variable, arg2) if s.contains(arg1) => // arg1 is a symbolic access variable -// evaluateExpression(arg2, constProp(n)) match -// case Some(value) => // arg2 is some constant -// updateOffsets(variable, arg1, value.value, op, s) -// case None => // couldn't evaluate R2 to a constant -// arg2 match -// case vari:Variable if s.contains(vari) => -// evaluateExpression(arg1, constProp(n)) match -// case Some(value) => -// updateOffsets(variable, vari, value.value, op, s) -// case None => s - variable -// case _ => -// s - variable -// case BinaryExpr(op, arg1, arg2: Variable) if s.contains(arg2) => -// evaluateExpression(arg1, constProp(n)) match -// case Some(value) => // arg1 is some constant -// updateOffsets(variable, arg2, value.value, op, s) -// case None => s - variable // couldn't evaluate R1 to a constant -// case vari: Variable if s.contains(vari) => -// s + (variable -> s(vari)) -// case MemoryLoad(mem, index, endian, size) => -// index match -// case BinaryExpr(op, arg1: Variable, arg2) if s.contains(arg1) && evaluateExpression(arg2, constProp(n)).isDefined => -// val local = LocalVar(getNextLocalVarName, BitVecType(64)) -// updateOffsets(local, arg1, evaluateExpression(arg2, constProp(n)).get.value, op, s) -// case _ => s -// case _ if s.contains(variable) => -// s - variable -// case _ => s -// case directCall: DirectCall if directCall.target.name == "malloc" => -// nextMallocCount -// evaluateExpression(mallocVariable, constProp(n)) match -// case Some(value) => -// s + (mallocVariable -> 
Set(SymbolicAccess(HeapRegion2(s"Malloc-${mallocCount}", value), 0))) -// case None => -// s + (mallocVariable -> Set(SymbolicAccess(HeapRegion2(s"Malloc-${mallocCount}", BitVecLiteral(-1, 64)), 0))) -// case MemoryAssign(mem, MemoryStore(m, index, value, endian, size), label) => -// index match -// case BinaryExpr(op, arg1: Variable, arg2) if s.contains(arg1) && evaluateExpression(arg2, constProp(n)).isDefined => -// val local = LocalVar(getNextLocalVarName, BitVecType(64)) -// updateOffsets(local, arg1, evaluateExpression(arg2, constProp(n)).get.value, op, s) -// case _ => s -// case _ => s -// } -// -// -//// override def analyze() = { -//// val results = super.analyze() -//// var offsetMapping -//// results.foreach( -//// -//// ) -//// results -//// } -//} -// -//trait MemoryRegion2 { -// val regionIdentifier: String -//} -// -//case class StackRegion2(override val regionIdentifier: String, parent: Procedure, size: BitVecLiteral) extends MemoryRegion2 { -// override def toString: String = s"Stack($regionIdentifier, ${parent.name}, $size)" -//// ${if symbolicAccess.isDefined then s", ${symbolicAccess.get}" else ""} -//} -// -//case class HeapRegion2(override val regionIdentifier: String, size: BitVecLiteral) extends MemoryRegion2 { -// override def toString: String = s"Heap($regionIdentifier, $size)" -//} -// -//case class DataRegion2(override val regionIdentifier: String, start: BitVecLiteral) extends MemoryRegion2 { -// override def toString: String = s"Data($regionIdentifier, $start)" -//} -// -// -// diff --git a/src/main/scala/analysis/RegionBuilder.scala b/src/main/scala/analysis/RegionBuilder.scala deleted file mode 100644 index c752c1559..000000000 --- a/src/main/scala/analysis/RegionBuilder.scala +++ /dev/null @@ -1,336 +0,0 @@ -//package analysis -// -//import analysis.solvers.UnionFindSolver -//import ir.{BinaryExpr, BitVecLiteral, CFGPosition, Expr, Extract, InterProcIRCursor, IntraProcIRCursor, Literal, LocalAssign, Memory, MemoryAssign, MemoryLoad, MemoryStore, Procedure, Program, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, computeDomain} -//import specification.{ExternalFunction, SpecGlobal} -// -//import scala.collection.mutable -//import scala.util.boundary, boundary.break -// -//class RegionBuilder(program: Program, symResults: Map[CFGPosition, Map[SymbolicAccess, TwoElement]], -// constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], -// globals: Set[SpecGlobal], globalOffsets: Map[BigInt, BigInt], -// externalFunctions: Set[ExternalFunction]) extends Analysis[Any] { -// -// val graphs: mutable.Map[Procedure, DSG] = mutable.Map() -// val nodes: mutable.Map[MemoryRegion2, DSN] = mutable.Map() -// val solver: UnionFindSolver[StTerm] = UnionFindSolver() -// val loadStore: mutable.Set[CFGPosition] = mutable.Set() -// val pointTo: mutable.Map[DSC, DSC] = mutable.Map() -// -// -// private def replaceInPointTo(oldCell: DSC, newCell:DSC) = -// pointTo.foreach{ -// case (pointer, pointee) => -// if pointee.equals(oldCell) then -// pointTo.update(pointer, newCell) -// } -// -// private def getPointee(cell: DSC): DSC = -// if !pointTo.contains(cell) then -// val node = DSN(None, None) -// pointTo.update(cell, node.cells(0)) -// pointTo(cell) -// -// -// -// private def earlyCollapse(node: DSN) : Unit = -// node.collapsed = true -// node.cells.clear() -// node.addCell(0, 0) -// -// private def collapseNode(node: DSN): Unit = -// val e = DSC(None, 0) -// val cell = node.cells.foldLeft(e){ -// (c, field) => mergeCells(c, getPointee(field._2)) -// } -// 
earlyCollapse(node) -// pointTo.update(node.cells(0), cell) -// -// -// private def mergeCells(cell1: DSC, cell2: DSC): DSC = -// if (incompatibleTypes(cell1, cell2)) then -// collapseNode(cell2.node.get) -// -// if cell2.node.get.region.isEmpty && cell1.node.isDefined then -// cell2.node.get.region = cell1.node.get.region -// -// if cell2.node.get.collapsed then -// if cell1.node.isDefined then -// cell1.node.get.cells.foreach{ -// case (offset, cell) => -// if pointTo.contains(cell) then -// if pointTo.contains(cell2.node.get.cells(0)) then -// mergeCells(getPointee(cell), getPointee(cell2.node.get.cells(0))) -// else -// pointTo.update(cell2.node.get.cells(0), getPointee(cell)) -// pointTo.remove(cell) -// replaceInPointTo(cell, cell2.node.get.cells(0)) -// } -// cell2.node.get.cells(0) -// else -// if pointTo.contains(cell1) then -// if pointTo.contains(cell2.node.get.cells(0)) then -// mergeCells(getPointee(cell1), getPointee(cell2.node.get.cells(0))) -// else -// pointTo.update(cell2.node.get.cells(0), getPointee(cell1)) -// pointTo.remove(cell1) -// replaceInPointTo(cell1, cell2.node.get.cells(0)) -// cell2.node.get.cells(0) -// else -// cell1.node.get.cells.foreach{ -// case (offset, cell) => -// if pointTo.contains(cell) then -// if pointTo.contains(cell2.node.get.cells(offset)) then -// mergeCells(getPointee(cell), getPointee(cell2.node.get.cells(offset))) -// else -// pointTo.update(cell2.node.get.cells(offset), getPointee(cell)) -// pointTo.remove(cell) -// replaceInPointTo(cell, cell2.node.get.cells(offset)) -// -// -// } -// cell2 -// -// -// private def incompatibleTypes(cell1: DSC, cell2: DSC): Boolean = -// if cell2.node.get.collapsed then -// return false -// else if cell1.node.isEmpty then -// return true // TODO not sure about this -// else if cell1.node.get.cells.size != cell2.node.get.cells.size then -// return true -// else -// (cell1.node.get.cells zip cell2.node.get.cells).foreach { -// case ((o1, c1), (o2, c2)) => -// if o1 != o2 || !c1.accessedSizes.equals(c2.accessedSizes) then -// return true -// } -// false -// -// private def multiAccessesSizes(node: DSN): Boolean = -// node.cells.foreach( -// c => -// val cell = c._2 -// if cell.accessedSizes.size > 1 then -// return true -// ) -// false -// -// -// private val swappedOffsets = globalOffsets.map(_.swap) -// -// val globalMapping: mutable.Map[(BigInt, BigInt), DSN] = globals.foldLeft(mutable.Map[(BigInt, BigInt), DSN]()) { -// (m, global) => -// var address: BigInt = global.address -// if swappedOffsets.contains(address) then -// address = swappedOffsets(address) -// m + ((address, address + global.size) -> DSN(None, Some(DataRegion2(global.name, global.address, global.size)))) -// } -// -// externalFunctions.foreach( -// external => -// var address: BigInt = external.offset -// if swappedOffsets.contains(address) then -// address = swappedOffsets(address) -// globalMapping.update((address, address), DSN(None, Some(DataRegion2(external.name, address, 0)))) -// ) -// -// -// val varToSym: Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]] = symResults.foldLeft(Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]]()) { -// (outerMap, syms) => -// val position = syms._1 -// val innerMap = syms._2.foldLeft(Map[Variable, Set[SymbolicAccess]]()) { -// (m, access) => -// val b = position -// if (m.contains(access._1.accessor)) then -// m + (access._1.accessor -> (m(access._1.accessor) + access._1)) -// else -// m + (access._1.accessor -> Set(access._1)) -// } -// -// outerMap + (position -> innerMap) -// } -// -// 
private def isGlobal(address: BigInt): Option[DSN] = -// for (elem <- globalMapping) { -// val range = elem._1 -// if address >= range._1 && address <= range._2 then -// return Some(elem._2) -// } -// None -// -// -// -// private def buildNode(sym: SymbolicAccess, offset: BigInt, size: Int): Unit = -// val region = sym.symbolicBase -// val newOffset = sym.offset + offset -// val proc = region match -// case DataRegion2(regionIdentifier, start, size) => ??? -// case HeapRegion2(regionIdentifier, proc, size) => proc -// case StackRegion2(regionIdentifier, proc, size) => proc -// case UnknownRegion2(regionIdentifier, proc) => proc -// val graph = graphs(proc) -// val node = graph.addNode(region, newOffset, size) -// nodes.update(region, node) -// -// private def getCell(sym: SymbolicAccess, offset: BigInt): DSC = -// val region = sym.symbolicBase -// val newOffset = sym.offset + offset -// val node = nodes(region) -// if node.collapsed then -// node.cells(0) -// else -// node.cells(newOffset) -// -// private def visit(n: CFGPosition): Unit = -// n match -// case LocalAssign(variable, expr, maybeString) => -// expr match -// case MemoryLoad(mem, index, endian, size) => -// val byteSize = (size.toDouble/8).ceil.toInt -// if evaluateExpression(index, constProp(n)).isDefined && isGlobal(evaluateExpression(index, constProp(n)).get.value).isDefined then -// val address = evaluateExpression(index, constProp(n)).get.value -// val node: DSN = isGlobal(evaluateExpression(index, constProp(n)).get.value).get -// val baseAddress = node.region.get.asInstanceOf[DataRegion2].start -// val offset = address - baseAddress -// node.addCell(offset, size) -// loadStore.add(n) -// else -// index match -// case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => -// assert(varToSym(n).contains(arg1)) -// val offset = evaluateExpression(arg2, constProp(n)).get.value -// varToSym(n)(arg1).foreach(sym => buildNode(sym, offset, byteSize)) -// loadStore.add(n) -// case arg: Variable => -// assert(varToSym(n).contains(arg)) -// varToSym(n)(arg).foreach(sym => buildNode(sym, 0, byteSize)) -// loadStore.add(n) -// case _ => ??? 
-// case _ => -// case MemoryAssign(mem, MemoryStore(mem2, index, value, endian, size), label) => -// val byteSize = (size.toDouble/8).ceil.toInt -// if evaluateExpression(index, constProp(n)).isDefined && isGlobal(evaluateExpression(index, constProp(n)).get.value).isDefined then -// val address = evaluateExpression(index, constProp(n)).get.value -// val node: DSN = isGlobal(evaluateExpression(index, constProp(n)).get.value).get -// val baseAddress = node.region.get.asInstanceOf[DataRegion2].start -// val offset = address - baseAddress -// node.addCell(offset, size) -// loadStore.add(n) -// else -// index match -// case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => -// assert(varToSym(n).contains(arg1)) -// val offset = evaluateExpression(arg2, constProp(n)).get.value -// varToSym(n)(arg1).foreach(sym => buildNode(sym, offset, byteSize)) -// loadStore.add(n) -// value match -// case BinaryExpr(op, arg1: Variable, arg2) if varToSym(n).contains(arg1) && evaluateExpression(arg2, constProp(n)).isDefined => -// val offset = evaluateExpression(arg2, constProp(n)).get.value -// varToSym(n)(arg1).foreach(sym => buildNode(sym, offset, byteSize)) -// case variable: Variable if varToSym(n).contains(variable) => -// varToSym(n)(variable).foreach(sym => buildNode(sym, 0, byteSize)) -// case _ => -// case arg: Variable => -// assert(varToSym(n).contains(arg)) -// varToSym(n)(arg).foreach(sym => buildNode(sym, 0, byteSize)) -// loadStore.add(n) -// value match -// case BinaryExpr(op, arg1: Variable, arg2) if varToSym(n).contains(arg1) && evaluateExpression(arg2, constProp(n)).isDefined => -// val offset = evaluateExpression(arg2, constProp(n)).get.value -// varToSym(n)(arg1).foreach(sym => buildNode(sym, offset, byteSize)) -// case variable: Variable if varToSym(n).contains(variable) => -// varToSym(n)(variable).foreach(sym => buildNode(sym, 0, byteSize)) -// -// case _ => -// case _ => ??? -// case _ => -// -// private def coolVisit(n: CFGPosition): Unit = -// n match -// case LocalAssign(variable, expr, maybeString) => -// val pointers : mutable.Set[DSC] = mutable.Set() -// varToSym(n).getOrElse(variable, Set()).foreach(sym => pointers.add(getCell(sym, 0))) -// -// expr match -// case MemoryLoad(mem, index, endian, size) => -// val byteSize = (size.toDouble / 8).ceil.toInt -// val pointees: mutable.Set[DSC] = mutable.Set() -// if evaluateExpression(index, constProp(n)).isDefined && isGlobal(evaluateExpression(index, constProp(n)).get.value).isDefined then -// val address = evaluateExpression(index, constProp(n)).get.value -// val node: DSN = isGlobal(evaluateExpression(index, constProp(n)).get.value).get -// val baseAddress = node.region.get.asInstanceOf[DataRegion2].start -// val offset = address - baseAddress -// if node.collapsed then pointees.add(getPointee(node.cells(0))) else pointees.add(getPointee(node.cells(offset))) -// else -// index match -// case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => -// assert(varToSym(n).contains(arg1)) -// val offset = evaluateExpression(arg2, constProp(n)).get.value -// varToSym(n)(arg1).foreach(sym => pointees.add(getPointee(getCell(sym, offset)))) -// case arg: Variable => -// assert(varToSym(n).contains(arg)) -// varToSym(n)(arg).foreach(sym => pointees.add(getPointee(getCell(sym, 0)))) -// case _ => ??? 
-// pointees.foreach( -// pointee => -// pointers.foreach( -// pointer => mergeCells(pointer, pointee) -// ) -// ) -// case _ => -// case MemoryAssign(mem, MemoryStore(mem2, index, value, endian, size), label) => -// val pointees : mutable.Set[DSC] = mutable.Set() -// val pointers: mutable.Set[DSC] = mutable.Set() -// val byteSize = (size.toDouble / 8).ceil.toInt -// if evaluateExpression(index, constProp(n)).isDefined && isGlobal(evaluateExpression(index, constProp(n)).get.value).isDefined then -// val address = evaluateExpression(index, constProp(n)).get.value -// val node: DSN = isGlobal(evaluateExpression(index, constProp(n)).get.value).get -// val baseAddress = node.region.get.asInstanceOf[DataRegion2].start -// val offset = address - baseAddress -// if node.collapsed then pointees.add(getPointee(node.cells(0))) else pointees.add(getPointee(node.cells(offset))) -// else -// index match -// case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => -// assert(varToSym(n).contains(arg1)) -// val offset = evaluateExpression(arg2, constProp(n)).get.value -// varToSym(n)(arg1).foreach(sym => pointees.add(getPointee(getCell(sym, offset)))) -// value match -// case BinaryExpr(op, arg1: Variable, arg2) if varToSym(n).contains(arg1) && evaluateExpression(arg2, constProp(n)).isDefined => -// val offset = evaluateExpression(arg2, constProp(n)).get.value -// varToSym(n)(arg1).foreach(sym => pointers.add(getCell(sym, offset))) -// case variable: Variable if varToSym(n).contains(variable) => -// varToSym(n)(variable).foreach(sym => pointers.add(getCell(sym, 0))) -// case _ => -// case arg: Variable => -// assert(varToSym(n).contains(arg)) -// varToSym(n)(arg).foreach(sym => pointees.add(getPointee(getCell(sym, 0)))) -// value match -// case BinaryExpr(op, arg1: Variable, arg2) if varToSym(n).contains(arg1) && evaluateExpression(arg2, constProp(n)).isDefined => -// val offset = evaluateExpression(arg2, constProp(n)).get.value -// varToSym(n)(arg1).foreach(sym => pointers.add(getCell(sym, offset))) -// case variable: Variable if varToSym(n).contains(variable) => -// varToSym(n)(variable).foreach(sym => pointers.add(getCell(sym, 0))) -// -// case _ => -// case _ => ??? -// pointees.foreach( -// pointee => -// pointers.foreach( -// pointer => mergeCells(pointer, pointee) -// ) -// ) -// case _ => -// -// -// def analyze(): Any = ??? 
-//// program.procedures.foreach(proc => graphs.update(proc, DSG(proc))) -//// computeDomain(InterProcIRCursor, Set(program.mainProcedure)).foreach(visit) -//// nodes.values.foreach(earlyCollapse) -//// loadStore.foreach(coolVisit) -//// pointTo.foreach{ -//// case (cell1, cell2) => -//// println(cell1.toString + " -> " + cell2.toString) -//// } -//} diff --git a/src/main/scala/analysis/SymbolicAccessAnalysis.scala b/src/main/scala/analysis/SymbolicAccessAnalysis.scala index d412c2f5f..1579e4155 100644 --- a/src/main/scala/analysis/SymbolicAccessAnalysis.scala +++ b/src/main/scala/analysis/SymbolicAccessAnalysis.scala @@ -55,37 +55,6 @@ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElem val edgelattice: EdgeFunctionLattice[TwoElement, TwoElementLattice] = EdgeFunctionLattice(valuelattice) import edgelattice.{IdEdge, ConstEdge} - def decToBinary(n: BigInt): Array[Int] = { - val binaryNum: Array[Int] = new Array[Int](64) - var i = 0 - var num = n - while (num > 0) { - binaryNum(i) = (num % BigInt(2)).intValue - num = num / 2 - i += 1 - } - binaryNum - } - - def twosComplementToDec(binary: Array[Int]): BigInt = { - var result: BigInt = BigInt(0) - var counter: Int = 0 - binary.foreach( - n => - if counter == binary.length - 1 && n == 1 then - result = result - BigInt(2).pow(counter) - else if n == 1 then - result = result + BigInt(2).pow(counter) - counter += 1 - ) - result - } - - def unwrapPaddingAndSlicing(expr: Expr): Expr = - expr match - case Extract(end, start, body) if start == 0 && end == 32 => unwrapPaddingAndSlicing(body) - case ZeroExtend(extension, body) => unwrapPaddingAndSlicing(body) - case _ => expr def edgesCallToEntry(call: DirectCall, entry: Procedure)(d: DL): Map[DL, EdgeFunction[TwoElement]] = d match @@ -115,14 +84,12 @@ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElem case Right(_) => Map(d -> IdEdge()) def edgesOther(n: CFGPosition)(d: DL): Map[DL, EdgeFunction[TwoElement]] = - val bitvecnegative: BigInt = new BigInt(new BigInteger("9223372036854775808")) // negative 64 bit integer - n match case LocalAssign(variable, rhs, maybeString) => val expr = unwrapPaddingAndSlicing(rhs) expr match case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) && arg1.equals(stackPointer) - && evaluateExpression(arg2, constProp(n)).isDefined && evaluateExpression(arg2, constProp(n)).get.value >= bitvecnegative => + && evaluateExpression(arg2, constProp(n)).isDefined && evaluateExpression(arg2, constProp(n)).get.value >= BITVECNEGATIVE => d match case Left(value) if value.accessor == variable => Map() case Left(value) => Map(d -> IdEdge()) diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 2420557bd..d567a2fc2 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -958,7 +958,8 @@ object RunUtils { s => writeToFile(toDot(ctx.program), s"${s}_ct.dot") ) - val b = Local(ctx.program.mainProcedure, analysisResult.last.symbolicAccessess, analysisResult.last.IRconstPropResult, ctx.globals, ctx.globalOffsets, ctx.externalFunctions, reachingDefs, writesTo).analyze() + val b = Local(ctx.program.mainProcedure, analysisResult.last.symbolicAccessess, analysisResult.last.IRconstPropResult, ctx.globals, ctx.globalOffsets, ctx.externalFunctions, reachingDefs, writesTo, analysisResult.last.paramResults).analyze() + val c = DSA(ctx.program, analysisResult.last.symbolicAccessess, analysisResult.last.IRconstPropResult, ctx.globals, ctx.globalOffsets, 
ctx.externalFunctions, reachingDefs, writesTo, analysisResult.last.paramResults).analyze() Logger.info(s"[!] Finished indirect call resolution after $iteration iterations") StaticAnalysisContext( diff --git a/src/test/scala/LocalTest.scala b/src/test/scala/LocalTest.scala index 9ca42aad4..81be26fe9 100644 --- a/src/test/scala/LocalTest.scala +++ b/src/test/scala/LocalTest.scala @@ -25,16 +25,16 @@ class LocalTest extends AnyFunSuite, TestUtil { ) val dsg = results.analysis.get.dsg.get assert(dsg.pointTo.size == 9) - val framePointer = DSC(Some(DSN(None, None, 1)), 0) // R31 + val framePointer = DSC(Some(DSN(None, 0, 1)), 0) // R31 assert(dsg.pointTo(framePointer).equals(dsg.formals(R29)._1)) - val stack8 = DSC(Some(DSN(None, None, 2)), 0) // R31 + 8 + val stack8 = DSC(Some(DSN(None, 0, 2)), 0) // R31 + 8 assert(dsg.pointTo(stack8).equals(dsg.formals(R30)._1)) - val stack40 = DSC(Some(DSN(None, None, 3)), 0) // R31 + 40 - val stack32 = DSC(Some(DSN(None, None, 5)), 0) // R31 + 32 + val stack40 = DSC(Some(DSN(None, 0, 3)), 0) // R31 + 40 + val stack32 = DSC(Some(DSN(None, 0, 5)), 0) // R31 + 32 val stack24 = dsg.pointTo(stack32) // R31 + 24 and Malloc assert(stack24.node.get.collapsed) assert(dsg.pointTo(stack24).equals(stack24)) - assert(dsg.pointTo(stack40).equals(dsg.getPointee(dsg.getPointee(DSC(Some(DSN(None, None, 12)), 0))))) + assert(dsg.pointTo(stack40).equals(dsg.getPointee(dsg.getPointee(DSC(Some(DSN(None,0, 12)), 0))))) // assert(dsg.pointTo.contains(framePointer)) } From 24ecede4744d05b13d534c57739c50f7a0001d69 Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Wed, 15 May 2024 11:53:18 +1000 Subject: [PATCH 010/104] bu phase --- src/main/scala/analysis/DSA.scala | 100 +++++++++++++++++- src/main/scala/analysis/DSAUtility.scala | 126 +++++++++++++++++++++-- src/main/scala/analysis/Local.scala | 56 +++++----- src/main/scala/ir/IRCursor.scala | 2 +- 4 files changed, 249 insertions(+), 35 deletions(-) diff --git a/src/main/scala/analysis/DSA.scala b/src/main/scala/analysis/DSA.scala index 6a967badd..3814366cf 100644 --- a/src/main/scala/analysis/DSA.scala +++ b/src/main/scala/analysis/DSA.scala @@ -1,6 +1,6 @@ package analysis -import ir.{BitVecLiteral, CFGPosition, Procedure, Program, Register, Variable} +import ir.{BitVecLiteral, BitVecType, CFGPosition, CallGraph, Procedure, Program, Register, Variable, computeDomain, end} import specification.{ExternalFunction, SpecGlobal} import scala.collection.mutable @@ -15,12 +15,104 @@ class DSA(program: Program, params: Map[Procedure, Set[Variable]] ) extends Analysis[Any] { - val DSGs : mutable.Map[Procedure, DSG] = mutable.Map() + val locals : mutable.Map[Procedure, DSG] = mutable.Map() + val bu: mutable.Map[Procedure, DSG] = mutable.Map() + + val stackPointer = Register("R31", BitVecType(64)) + val returnPointer = Register("R30", BitVecType(64)) + val framePointer = Register("R29", BitVecType(64)) + + val ignoreRegisters: Set[Variable] = Set(stackPointer, returnPointer, framePointer) + + def findLeaf(procedure: Procedure): Set[Procedure] = + if CallGraph.succ(procedure).isEmpty then + Set(procedure) + else + CallGraph.succ(procedure).foldLeft(Set[Procedure]()){ + (s, proc) => s ++ findLeaf(proc) + } + + def getCells(pos: CFGPosition, arg: Variable, graph: DSG): Set[(DSC, BigInt)] = + if reachingDefs(pos).contains(arg) then + reachingDefs(pos)(arg).foldLeft(Set[(DSC, BigInt)]()) { + (s, defintion) => + s + graph.varToCell(defintion)(arg) + } + else + Set(graph.formals(arg)) + + var visited = Set[Procedure]() + val queue = 
mutable.Queue[Procedure]() override def analyze(): Any = { - program.procedures.foreach( + val domain = computeDomain(CallGraph, Set(program.mainProcedure)) + domain.foreach( proc => val dsg = Local(proc, symResults, constProp, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params).analyze() - DSGs.update(proc, dsg) + locals.update(proc, dsg) + bu.update(proc, dsg.cloneSelf()) + ) + + val leafNodes = findLeaf(program.mainProcedure) + + leafNodes.foreach( + proc => + assert(locals(proc).callsites.isEmpty) + visited += proc + val preds = CallGraph.pred(proc) + queue.enqueueAll(CallGraph.pred(proc).diff(visited)) ) + + while queue.nonEmpty do + val proc = queue.dequeue() + val buGraph = bu(proc) + buGraph.callsites.foreach( // clone all the nodes first + callSite => + val callee = callSite.proc + val calleeGraph = locals(callee).cloneSelf() + assert(calleeGraph.formals.keySet.diff(ignoreRegisters).equals(callSite.paramCells.keySet)) + calleeGraph.formals.foreach{ + case (variable: Variable, (cell: DSC, internalOffset: BigInt)) if !ignoreRegisters.contains(variable) => + assert(callSite.paramCells.contains(variable)) + val node = cell.node.get + node.cloneNode(calleeGraph, buGraph) + case _ => + } + + assert(writesTo(callee).equals(callSite.returnCells.keySet)) + writesTo(callee).foreach( + reg => + assert(callSite.returnCells.contains(reg)) + val returnCells = calleeGraph.getCells(end(callee), reg) + assert(returnCells.nonEmpty) + returnCells.foreach{ + case (cell: DSC, internalOffset: BigInt) => + val node = cell.node.get + node.cloneNode(calleeGraph, buGraph) + } + ) + ) + buGraph.callsites.foreach(//unify nodes + callSite => + val callee = callSite.proc + val calleeGraph = locals(callee).cloneSelf() + calleeGraph.formals.foreach{ + case (variable: Variable, (cell: DSC, internalOffset: BigInt)) if !ignoreRegisters.contains(variable) => + buGraph.mergeCells(cell, callSite.paramCells(variable)) + case _ => + } + writesTo(callee).foreach( + reg => + val returnCells = calleeGraph.getCells(end(callee), reg) +// assert(returnCells.nonEmpty) + returnCells.foldLeft(callSite.returnCells(reg)){ + case (c: DSC, (cell: DSC, internalOffset: BigInt)) => + buGraph.mergeCells(c, cell) + } + ) + + ) + + + println(bu) } } diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index a5ad32e0c..0986574ea 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -171,12 +171,29 @@ class DSG(val proc: Procedure, if node.equals(oldCell.node.get) then stackMapping.update(offset, newCell.node.get) } + + private def replaceInCallSites(oldCell: DSC, newCell: DSC) = + callsites.foreach( + callSite => + callSite.returnCells.foreach{ + case (variable: Variable, cell: DSC) => + if cell.equals(oldCell) then + callSite.returnCells.update(variable, newCell) + } + + callSite.paramCells.foreach{ + case (variable: Variable, cell: DSC) => + if cell.equals(oldCell) then + callSite.paramCells.update(variable, newCell) + } + ) private def replace(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = replaceInEV(oldCell, newCell, internalOffsetChange) replaceInPointTo(oldCell, newCell) replaceInGlobals(oldCell, newCell) replaceInStack(oldCell, newCell) + replaceInCallSites(oldCell, newCell) def getPointee(cell: DSC): DSC = if !pointTo.contains(cell) then @@ -184,7 +201,14 @@ class DSG(val proc: Procedure, pointTo.update(cell, node.cells(0)) pointTo(cell) - + def getCells(pos: CFGPosition, arg: Variable): Set[(DSC, 
BigInt)] = + if reachingDefs(pos).contains(arg) then + reachingDefs(pos)(arg).foldLeft(Set[(DSC, BigInt)]()) { + (s, defintion) => + s + varToCell(defintion)(arg) + } + else + Set(formals(arg)) def collapseNode(node: DSN): Unit = val collapedCell = DSC(Option(node), 0) @@ -408,6 +432,75 @@ class DSG(val proc: Procedure, case _ => m } + def cloneSelf(): DSG = + val newGraph = DSG(proc, constProp, varToSym, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params) + assert(formals.size == newGraph.formals.size) + val idToNode: mutable.Map[Int, DSN] = mutable.Map() + formals.foreach{ + case (variable: Variable, (cell: DSC, internalOffset: BigInt)) => + assert(newGraph.formals.contains(variable)) + val node = cell.node.get + if !idToNode.contains(node.id) then + val newNode = node.cloneSelf(newGraph) + idToNode.update(node.id, newNode) + newGraph.formals.update(variable, (idToNode(node.id).cells(cell.offset), internalOffset)) + } + + varToCell.foreach { + case (position: CFGPosition, values: mutable.Map[Variable, (DSC, BigInt)]) => + assert(newGraph.varToCell.contains(position)) + values.foreach{ + case (variable: Variable, (cell: DSC, internalOffset: BigInt)) => + assert(newGraph.varToCell(position).contains(variable)) + val node = cell.node.get + if !idToNode.contains(node.id) then + val newNode = node.cloneSelf(newGraph) + idToNode.update(node.id, newNode) + newGraph.varToCell(position).update(variable, (idToNode(node.id).cells(cell.offset), internalOffset)) + } + } + + stackMapping.foreach{ + case (offset, node) => + assert(newGraph.stackMapping.contains(offset)) + if !idToNode.contains(node.id) then + val newNode = node.cloneSelf(newGraph) + idToNode.update(node.id, newNode) + newGraph.stackMapping.update(offset, idToNode(node.id)) + } + + globalMapping.foreach { + case ((start: BigInt, end: BigInt), (node: DSN, internalOffset: BigInt)) => + assert(newGraph.globalMapping.contains((start, end))) + if !idToNode.contains(node.id) then + val newNode = node.cloneSelf(newGraph) + idToNode.update(node.id, newNode) + newGraph.globalMapping.update((start, end), (idToNode(node.id), internalOffset)) + } + + callsites.foreach( + callSite => + val cs = CallSite(callSite.call, newGraph) + newGraph.callsites.add(cs) + assert(cs.paramCells.keySet.equals(callSite.paramCells.keySet)) + callSite.paramCells.foreach{ + case (variable: Variable, cell: DSC) => + assert(cs.paramCells.contains(variable)) + val id = cell.node.get.id + cs.paramCells.update(variable, idToNode(id).cells(cell.offset)) + } + + callSite.returnCells.foreach{ + case (variable: Variable, cell: DSC) => + assert(cs.returnCells.contains(variable)) + val id = cell.node.get.id + cs.returnCells.update(variable, idToNode(id).cells(cell.offset)) + } + ) + + newGraph.nodes.addAll(idToNode.values) + newGraph + } class Flags() { @@ -490,6 +583,27 @@ class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCount cells(offset).growSize(size) cells(offset) + def cloneSelf(graph: DSG) : DSN = + val node = DSN(Some(graph), this.size) + node.allocationRegions.addAll(this.allocationRegions) + node.flags.join(this.flags) + cells.foreach{ + case (offset: BigInt, cell: DSC) => + node.addCell(offset, cell.largestAccessedSize) + } + node + + def cloneNode(from: DSG, to: DSG): Unit = + assert(from.equals(graph.get)) + if !to.nodes.contains(this) then + to.nodes.add(this) + cells.foreach { + case (offset: BigInt, cell: DSC) => + if from.pointTo.contains(cell) then + val pointee = from.getPointee(cell) + 
pointee.node.get.cloneNode(to,from) + to.pointTo.update(cell, pointee) + } override def equals(obj: Any): Boolean = obj match @@ -517,23 +631,23 @@ case class DSC(node: Option[DSN], offset: BigInt) class CallSite(val call: DirectCall, val graph: DSG) { val proc = call.target - val paramCells: Map[Variable, DSC] = graph.params(proc).foldLeft(Map[Variable, DSC]()) { + val paramCells: mutable.Map[Variable, DSC] = graph.params(proc).foldLeft(mutable.Map[Variable, DSC]()) { (m, reg) => val node = DSN(Some(graph)) node.flags.incomplete = true - m + (reg -> node.cells(0)) + m += (reg -> node.cells(0)) } - val returnCells: Map[Variable, DSC] = graph.writesTo(proc).foldLeft(Map[Variable, DSC]()) { + val returnCells: mutable.Map[Variable, DSC] = graph.writesTo(proc).foldLeft(mutable.Map[Variable, DSC]()) { (m, reg) => val node = DSN(Some(graph)) node.flags.incomplete = true - m + (reg -> node.cells(0)) + m += (reg -> node.cells(0)) } } def unwrapPaddingAndSlicing(expr: Expr): Expr = expr match - case Extract(end, start, body) if start == 0 && end == 32 => unwrapPaddingAndSlicing(body) + case Extract(end, start, body) /*if start == 0 && end == 32*/ => unwrapPaddingAndSlicing(body) // this may make it unsound case ZeroExtend(extension, body) => unwrapPaddingAndSlicing(body) case _ => expr diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala index 708dc5f55..f4ed35139 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/Local.scala @@ -87,14 +87,14 @@ class Local( else None - def getCells(pos: CFGPosition, arg: Variable): Set[(DSC, BigInt)] = - if reachingDefs(pos).contains(arg) then - reachingDefs(pos)(arg).foldLeft(Set[(DSC, BigInt)]()) { - (s, defintion) => - s + graph.varToCell(defintion)(arg) - } - else - Set(graph.formals(arg)) +// def getCells(pos: CFGPosition, arg: Variable): Set[(DSC, BigInt)] = +// if reachingDefs(pos).contains(arg) then +// reachingDefs(pos)(arg).foldLeft(Set[(DSC, BigInt)]()) { +// (s, defintion) => +// s + graph.varToCell(defintion)(arg) +// } +// else +// Set(graph.formals(arg)) @@ -114,7 +114,7 @@ class Local( // visit all the defining pointer operation on rhs variable first reachingDefs(position)(rhs).foreach(visit) // get the cells of all the SSA variables in the set - val cells: Set[(DSC, BigInt)] = getCells(position, rhs) + val cells: Set[(DSC, BigInt)] = graph.getCells(position, rhs) // merge the cells or their pointees with lhs var result = cells.foldLeft(lhs) { (c, t) => @@ -179,6 +179,7 @@ class Local( val returnArgument = graph.varToCell(n)(variable)._1 graph.mergeCells(returnArgument, cell) } + print("") case LocalAssign(variable, rhs, maybeString) => val expr: Expr = unwrapPaddingAndSlicing(rhs) val lhsCell = graph.varToCell(n)(variable)._1 @@ -239,7 +240,7 @@ class Local( if containsPointer then val cell = expr.variables.foldLeft(lhsCell) { (c, v) => - val cells: Set[(DSC, BigInt)] = getCells(n, v) + val cells: Set[(DSC, BigInt)] = graph.getCells(n, v) cells.foldLeft(c) { (c, p) => @@ -270,7 +271,7 @@ class Local( case _ => ??? 
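+        // value-cell lookup for the store now goes through the DSG-level getCells introduced in this change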
addressPointee.node.get.flags.modified = true - val valueCells = getCells(n, value) + val valueCells = graph.getCells(n, value) val result = valueCells.foldLeft(addressPointee) { (c, p) => graph.mergeCells(p._1, c) @@ -279,21 +280,28 @@ class Local( case _ => } def analyze(): DSG = - val domain = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).reverse + val domain = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString) domain.foreach(visit) - println(graph.formals) - val results = graph.varToCell.keys.toSeq.sortBy(_.toShortString) - results.foreach { - pos => - println(pos) - val tab = " " - graph.varToCell(pos).foreach { - case (variable, cell) => - println(tab + variable.toString + " -> " + cell.toString) - } - } - println(graph.pointTo) +// println(graph.formals) +// val results = graph.varToCell.keys.toSeq.sortBy(_.toShortString) +// results.foreach { +// pos => +// println(pos) +// val tab = " " +// graph.varToCell(pos).foreach { +// case (variable, cell) => +// println(tab + variable.toString + " -> " + cell.toString) +// } +// } +// println(graph.pointTo) +// // collect the nodes in the dsg + graph.nodes.addAll(graph.formals.values.map(_._1.node.get)) + graph.varToCell.values.foreach( + value => graph.nodes.addAll(value.values.map(_._1.node.get)) + ) + graph.nodes.addAll(graph.stackMapping.values) + graph.nodes.addAll(graph.globalMapping.values.map(_._1)) graph } diff --git a/src/main/scala/ir/IRCursor.scala b/src/main/scala/ir/IRCursor.scala index 083216250..1be170f8b 100644 --- a/src/main/scala/ir/IRCursor.scala +++ b/src/main/scala/ir/IRCursor.scala @@ -185,7 +185,7 @@ object InterProcIRCursor extends InterProcIRCursor trait CallGraph extends IRWalk[Procedure, Procedure] { final def succ(b: Procedure): Set[Procedure] = b.calls - final def pred(b: Procedure): Set[Procedure] = b.incomingCalls().map(_.target).toSet + final def pred(b: Procedure): Set[Procedure] = b.incomingCalls().map(_.parent.parent).toSet } object CallGraph extends CallGraph From fbee14c6e5a328a19c0d3c991518dfdef4915b77 Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Thu, 16 May 2024 12:03:46 +1000 Subject: [PATCH 011/104] td phase --- .../interproc_pointer_arithmetic.adt | 609 ++++++++++++++++++ .../interproc_pointer_arithmetic.bir | 279 ++++++++ .../interproc_pointer_arithmetic.c | 14 + .../interproc_pointer_arithmetic.relf | 123 ++++ .../unsafe_pointer_arithmetic.adt | 586 +++++++++++++++++ .../unsafe_pointer_arithmetic.bir | 268 ++++++++ .../unsafe_pointer_arithmetic.c | 12 + .../unsafe_pointer_arithmetic.relf | 122 ++++ src/main/scala/analysis/DSA.scala | 142 +++- src/main/scala/analysis/DSAUtility.scala | 179 +++-- src/main/scala/analysis/Local.scala | 47 +- src/main/scala/util/RunUtils.scala | 18 +- src/test/scala/LocalTest.scala | 260 +++++++- 13 files changed, 2542 insertions(+), 117 deletions(-) create mode 100755 examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt create mode 100755 examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.bir create mode 100644 examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.c create mode 100755 examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf create mode 100644 examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.adt create mode 100644 examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.bir create mode 100644 examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.c create mode 100644 
examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.relf diff --git a/examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt b/examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt new file mode 100755 index 000000000..b3975b7b4 --- /dev/null +++ b/examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt @@ -0,0 +1,609 @@ +Project(Attrs([Attr("filename","interproc_pointer_arithmetic.out"), +Attr("image-specification","(declare abi (name str))\n(declare arch (name str))\n(declare base-address (addr int))\n(declare bias (off int))\n(declare bits (size int))\n(declare code-region (addr int) (size int) (off int))\n(declare code-start (addr int))\n(declare entry-point (addr int))\n(declare external-reference (addr int) (name str))\n(declare format (name str))\n(declare is-executable (flag bool))\n(declare is-little-endian (flag bool))\n(declare llvm:base-address (addr int))\n(declare llvm:code-entry (name str) (off int) (size int))\n(declare llvm:coff-import-library (name str))\n(declare llvm:coff-virtual-section-header (name str) (addr int) (size int))\n(declare llvm:elf-program-header (name str) (off int) (size int))\n(declare llvm:elf-program-header-flags (name str) (ld bool) (r bool) \n (w bool) (x bool))\n(declare llvm:elf-virtual-program-header (name str) (addr int) (size int))\n(declare llvm:entry-point (addr int))\n(declare llvm:macho-symbol (name str) (value int))\n(declare llvm:name-reference (at int) (name str))\n(declare llvm:relocation (at int) (addr int))\n(declare llvm:section-entry (name str) (addr int) (size int) (off int))\n(declare llvm:section-flags (name str) (r bool) (w bool) (x bool))\n(declare llvm:segment-command (name str) (off int) (size int))\n(declare llvm:segment-command-flags (name str) (r bool) (w bool) (x bool))\n(declare llvm:symbol-entry (name str) (addr int) (size int) (off int)\n (value int))\n(declare llvm:virtual-segment-command (name str) (addr int) (size int))\n(declare mapped (addr int) (size int) (off int))\n(declare named-region (addr int) (size int) (name str))\n(declare named-symbol (addr int) (name str))\n(declare require (name str))\n(declare section (addr int) (size int))\n(declare segment (addr int) (size int) (r bool) (w bool) (x bool))\n(declare subarch (name str))\n(declare symbol-chunk (addr int) (size int) (root int))\n(declare symbol-value (addr int) (value int))\n(declare system (name str))\n(declare vendor (name str))\n\n(abi unknown)\n(arch aarch64)\n(base-address 0)\n(bias 0)\n(bits 64)\n(code-region 2000 20 2000)\n(code-region 1600 400 1600)\n(code-region 1488 112 1488)\n(code-region 1464 24 1464)\n(code-start 1652)\n(code-start 1600)\n(code-start 1908)\n(code-start 1876)\n(entry-point 1600)\n(external-reference 131032 _ITM_deregisterTMCloneTable)\n(external-reference 131040 __cxa_finalize)\n(external-reference 131048 __gmon_start__)\n(external-reference 131064 _ITM_registerTMCloneTable)\n(external-reference 130984 __libc_start_main)\n(external-reference 130992 __cxa_finalize)\n(external-reference 131000 malloc)\n(external-reference 131008 __gmon_start__)\n(external-reference 131016 abort)\n(format elf)\n(is-executable true)\n(is-little-endian true)\n(llvm:base-address 0)\n(llvm:code-entry abort 0 0)\n(llvm:code-entry malloc 0 0)\n(llvm:code-entry __cxa_finalize 0 0)\n(llvm:code-entry __libc_start_main 0 0)\n(llvm:code-entry _init 1464 0)\n(llvm:code-entry callee 1876 32)\n(llvm:code-entry main 1908 92)\n(llvm:code-entry _start 1600 52)\n(llvm:code-entry abort@GLIBC_2.17 0 
0)\n(llvm:code-entry malloc@GLIBC_2.17 0 0)\n(llvm:code-entry _fini 2000 0)\n(llvm:code-entry __cxa_finalize@GLIBC_2.17 0 0)\n(llvm:code-entry __libc_start_main@GLIBC_2.34 0 0)\n(llvm:code-entry frame_dummy 1872 0)\n(llvm:code-entry __do_global_dtors_aux 1792 0)\n(llvm:code-entry register_tm_clones 1728 0)\n(llvm:code-entry deregister_tm_clones 1680 0)\n(llvm:code-entry call_weak_fn 1652 20)\n(llvm:code-entry .fini 2000 20)\n(llvm:code-entry .text 1600 400)\n(llvm:code-entry .plt 1488 112)\n(llvm:code-entry .init 1464 24)\n(llvm:elf-program-header 08 64912 624)\n(llvm:elf-program-header 07 0 0)\n(llvm:elf-program-header 06 2024 68)\n(llvm:elf-program-header 05 596 68)\n(llvm:elf-program-header 04 64928 496)\n(llvm:elf-program-header 03 64912 640)\n(llvm:elf-program-header 02 0 2292)\n(llvm:elf-program-header 01 568 27)\n(llvm:elf-program-header 00 64 504)\n(llvm:elf-program-header-flags 08 false true false false)\n(llvm:elf-program-header-flags 07 false true true false)\n(llvm:elf-program-header-flags 06 false true false false)\n(llvm:elf-program-header-flags 05 false true false false)\n(llvm:elf-program-header-flags 04 false true true false)\n(llvm:elf-program-header-flags 03 true true true false)\n(llvm:elf-program-header-flags 02 true true false true)\n(llvm:elf-program-header-flags 01 false true false false)\n(llvm:elf-program-header-flags 00 false true false false)\n(llvm:elf-virtual-program-header 08 130448 624)\n(llvm:elf-virtual-program-header 07 0 0)\n(llvm:elf-virtual-program-header 06 2024 68)\n(llvm:elf-virtual-program-header 05 596 68)\n(llvm:elf-virtual-program-header 04 130464 496)\n(llvm:elf-virtual-program-header 03 130448 648)\n(llvm:elf-virtual-program-header 02 0 2292)\n(llvm:elf-virtual-program-header 01 568 27)\n(llvm:elf-virtual-program-header 00 64 504)\n(llvm:entry-point 1600)\n(llvm:name-reference 131016 abort)\n(llvm:name-reference 131008 __gmon_start__)\n(llvm:name-reference 131000 malloc)\n(llvm:name-reference 130992 __cxa_finalize)\n(llvm:name-reference 130984 __libc_start_main)\n(llvm:name-reference 131064 _ITM_registerTMCloneTable)\n(llvm:name-reference 131048 __gmon_start__)\n(llvm:name-reference 131040 __cxa_finalize)\n(llvm:name-reference 131032 _ITM_deregisterTMCloneTable)\n(llvm:section-entry .shstrtab 0 250 68301)\n(llvm:section-entry .strtab 0 589 67712)\n(llvm:section-entry .symtab 0 2112 65600)\n(llvm:section-entry .comment 0 43 65552)\n(llvm:section-entry .bss 131088 8 65552)\n(llvm:section-entry .data 131072 16 65536)\n(llvm:section-entry .got 130960 112 65424)\n(llvm:section-entry .dynamic 130464 496 64928)\n(llvm:section-entry .fini_array 130456 8 64920)\n(llvm:section-entry .init_array 130448 8 64912)\n(llvm:section-entry .eh_frame 2096 196 2096)\n(llvm:section-entry .eh_frame_hdr 2024 68 2024)\n(llvm:section-entry .rodata 2020 4 2020)\n(llvm:section-entry .fini 2000 20 2000)\n(llvm:section-entry .text 1600 400 1600)\n(llvm:section-entry .plt 1488 112 1488)\n(llvm:section-entry .init 1464 24 1464)\n(llvm:section-entry .rela.plt 1344 120 1344)\n(llvm:section-entry .rela.dyn 1152 192 1152)\n(llvm:section-entry .gnu.version_r 1104 48 1104)\n(llvm:section-entry .gnu.version 1084 20 1084)\n(llvm:section-entry .dynstr 936 148 936)\n(llvm:section-entry .dynsym 696 240 696)\n(llvm:section-entry .gnu.hash 664 28 664)\n(llvm:section-entry .note.ABI-tag 632 32 632)\n(llvm:section-entry .note.gnu.build-id 596 36 596)\n(llvm:section-entry .interp 568 27 568)\n(llvm:section-flags .shstrtab true false false)\n(llvm:section-flags .strtab true false 
false)\n(llvm:section-flags .symtab true false false)\n(llvm:section-flags .comment true false false)\n(llvm:section-flags .bss true true false)\n(llvm:section-flags .data true true false)\n(llvm:section-flags .got true true false)\n(llvm:section-flags .dynamic true true false)\n(llvm:section-flags .fini_array true true false)\n(llvm:section-flags .init_array true true false)\n(llvm:section-flags .eh_frame true false false)\n(llvm:section-flags .eh_frame_hdr true false false)\n(llvm:section-flags .rodata true false false)\n(llvm:section-flags .fini true false true)\n(llvm:section-flags .text true false true)\n(llvm:section-flags .plt true false true)\n(llvm:section-flags .init true false true)\n(llvm:section-flags .rela.plt true false false)\n(llvm:section-flags .rela.dyn true false false)\n(llvm:section-flags .gnu.version_r true false false)\n(llvm:section-flags .gnu.version true false false)\n(llvm:section-flags .dynstr true false false)\n(llvm:section-flags .dynsym true false false)\n(llvm:section-flags .gnu.hash true false false)\n(llvm:section-flags .note.ABI-tag true false false)\n(llvm:section-flags .note.gnu.build-id true false false)\n(llvm:section-flags .interp true false false)\n(llvm:symbol-entry abort 0 0 0 0)\n(llvm:symbol-entry malloc 0 0 0 0)\n(llvm:symbol-entry __cxa_finalize 0 0 0 0)\n(llvm:symbol-entry __libc_start_main 0 0 0 0)\n(llvm:symbol-entry _init 1464 0 1464 1464)\n(llvm:symbol-entry callee 1876 32 1876 1876)\n(llvm:symbol-entry main 1908 92 1908 1908)\n(llvm:symbol-entry _start 1600 52 1600 1600)\n(llvm:symbol-entry abort@GLIBC_2.17 0 0 0 0)\n(llvm:symbol-entry malloc@GLIBC_2.17 0 0 0 0)\n(llvm:symbol-entry _fini 2000 0 2000 2000)\n(llvm:symbol-entry __cxa_finalize@GLIBC_2.17 0 0 0 0)\n(llvm:symbol-entry __libc_start_main@GLIBC_2.34 0 0 0 0)\n(llvm:symbol-entry frame_dummy 1872 0 1872 1872)\n(llvm:symbol-entry __do_global_dtors_aux 1792 0 1792 1792)\n(llvm:symbol-entry register_tm_clones 1728 0 1728 1728)\n(llvm:symbol-entry deregister_tm_clones 1680 0 1680 1680)\n(llvm:symbol-entry call_weak_fn 1652 20 1652 1652)\n(mapped 0 2292 0)\n(mapped 130448 640 64912)\n(named-region 0 2292 02)\n(named-region 130448 648 03)\n(named-region 568 27 .interp)\n(named-region 596 36 .note.gnu.build-id)\n(named-region 632 32 .note.ABI-tag)\n(named-region 664 28 .gnu.hash)\n(named-region 696 240 .dynsym)\n(named-region 936 148 .dynstr)\n(named-region 1084 20 .gnu.version)\n(named-region 1104 48 .gnu.version_r)\n(named-region 1152 192 .rela.dyn)\n(named-region 1344 120 .rela.plt)\n(named-region 1464 24 .init)\n(named-region 1488 112 .plt)\n(named-region 1600 400 .text)\n(named-region 2000 20 .fini)\n(named-region 2020 4 .rodata)\n(named-region 2024 68 .eh_frame_hdr)\n(named-region 2096 196 .eh_frame)\n(named-region 130448 8 .init_array)\n(named-region 130456 8 .fini_array)\n(named-region 130464 496 .dynamic)\n(named-region 130960 112 .got)\n(named-region 131072 16 .data)\n(named-region 131088 8 .bss)\n(named-region 0 43 .comment)\n(named-region 0 2112 .symtab)\n(named-region 0 589 .strtab)\n(named-region 0 250 .shstrtab)\n(named-symbol 1652 call_weak_fn)\n(named-symbol 1680 deregister_tm_clones)\n(named-symbol 1728 register_tm_clones)\n(named-symbol 1792 __do_global_dtors_aux)\n(named-symbol 1872 frame_dummy)\n(named-symbol 0 __libc_start_main@GLIBC_2.34)\n(named-symbol 0 __cxa_finalize@GLIBC_2.17)\n(named-symbol 2000 _fini)\n(named-symbol 0 malloc@GLIBC_2.17)\n(named-symbol 0 abort@GLIBC_2.17)\n(named-symbol 1600 _start)\n(named-symbol 1908 main)\n(named-symbol 1876 
callee)\n(named-symbol 1464 _init)\n(named-symbol 0 __libc_start_main)\n(named-symbol 0 __cxa_finalize)\n(named-symbol 0 malloc)\n(named-symbol 0 abort)\n(require libc.so.6)\n(section 568 27)\n(section 596 36)\n(section 632 32)\n(section 664 28)\n(section 696 240)\n(section 936 148)\n(section 1084 20)\n(section 1104 48)\n(section 1152 192)\n(section 1344 120)\n(section 1464 24)\n(section 1488 112)\n(section 1600 400)\n(section 2000 20)\n(section 2020 4)\n(section 2024 68)\n(section 2096 196)\n(section 130448 8)\n(section 130456 8)\n(section 130464 496)\n(section 130960 112)\n(section 131072 16)\n(section 131088 8)\n(section 0 43)\n(section 0 2112)\n(section 0 589)\n(section 0 250)\n(segment 0 2292 true false true)\n(segment 130448 648 true true false)\n(subarch v8)\n(symbol-chunk 1652 20 1652)\n(symbol-chunk 1600 52 1600)\n(symbol-chunk 1908 92 1908)\n(symbol-chunk 1876 32 1876)\n(symbol-value 1652 1652)\n(symbol-value 1680 1680)\n(symbol-value 1728 1728)\n(symbol-value 1792 1792)\n(symbol-value 1872 1872)\n(symbol-value 2000 2000)\n(symbol-value 1600 1600)\n(symbol-value 1908 1908)\n(symbol-value 1876 1876)\n(symbol-value 1464 1464)\n(symbol-value 0 0)\n(system \"\")\n(vendor \"\")\n"), +Attr("abi-name","aarch64-linux-gnu-elf")]), +Sections([Section(".shstrtab", 0x0, "\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\xb7\x00\x01\x00\x00\x00\x40\x06\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\xc8\x0b\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x40\x00\x38\x00\x09\x00\x40\x00\x1c\x00\x1b\x00\x06\x00\x00\x00\x04\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\xf8\x01\x00\x00\x00\x00\x00\x00\xf8\x01\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf4\x08\x00\x00\x00\x00\x00\x00\xf4\x08\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x06\x00\x00\x00\x90\xfd\x00\x00\x00\x00\x00\x00\x90\xfd"), +Section(".strtab", 0x0, 
"\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\xb7\x00\x01\x00\x00\x00\x40\x06\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\xc8\x0b\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x40\x00\x38\x00\x09\x00\x40\x00\x1c\x00\x1b\x00\x06\x00\x00\x00\x04\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\xf8\x01\x00\x00\x00\x00\x00\x00\xf8\x01\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf4\x08\x00\x00\x00\x00\x00\x00\xf4\x08\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x06\x00\x00\x00\x90\xfd\x00\x00\x00\x00\x00\x00\x90\xfd\x01\x00\x00\x00\x00\x00\x90\xfd\x01\x00\x00\x00\x00\x00\x80\x02\x00\x00\x00\x00\x00\x00\x88\x02\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x06\x00\x00\x00\xa0\xfd\x00\x00\x00\x00\x00\x00\xa0\xfd\x01\x00\x00\x00\x00\x00\xa0\xfd\x01\x00\x00\x00\x00\x00\xf0\x01\x00\x00\x00\x00\x00\x00\xf0\x01\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x04\x00\x00\x00\x54\x02\x00\x00\x00\x00\x00\x00\x54\x02\x00\x00\x00\x00\x00\x00\x54\x02\x00\x00\x00\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x50\xe5\x74\x64\x04\x00\x00\x00\xe8\x07\x00\x00\x00\x00\x00\x00\xe8\x07\x00\x00\x00\x00\x00\x00\xe8\x07\x00\x00\x00\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x51\xe5\x74\x64\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x52\xe5\x74\x64\x04\x00\x00\x00\x90\xfd\x00\x00\x00\x00\x00\x00\x90\xfd\x01\x00\x00\x00\x00\x00\x90\xfd\x01\x00\x00\x00\x00\x00\x70\x02\x00\x00\x00\x00\x00\x00\x70\x02\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x2f\x6c\x69\x62\x2f\x6c\x64\x2d\x6c\x69\x6e\x75\x78\x2d\x61\x61\x72\x63\x68\x36\x34"), +Section(".symtab", 0x0, 
"\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\xb7\x00\x01\x00\x00\x00\x40\x06\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\xc8\x0b\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x40\x00\x38\x00\x09\x00\x40\x00\x1c\x00\x1b\x00\x06\x00\x00\x00\x04\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\xf8\x01\x00\x00\x00\x00\x00\x00\xf8\x01\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf4\x08\x00\x00\x00\x00\x00\x00\xf4\x08\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x06\x00\x00\x00\x90\xfd\x00\x00\x00\x00\x00\x00\x90\xfd\x01\x00\x00\x00\x00\x00\x90\xfd\x01\x00\x00\x00\x00\x00\x80\x02\x00\x00\x00\x00\x00\x00\x88\x02\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x06\x00\x00\x00\xa0\xfd\x00\x00\x00\x00\x00\x00\xa0\xfd\x01\x00\x00\x00\x00\x00\xa0\xfd\x01\x00\x00\x00\x00\x00\xf0\x01\x00\x00\x00\x00\x00\x00\xf0\x01\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x04\x00\x00\x00\x54\x02\x00\x00\x00\x00\x00\x00\x54\x02\x00\x00\x00\x00\x00\x00\x54\x02\x00\x00\x00\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x50\xe5\x74\x64\x04\x00\x00\x00\xe8\x07\x00\x00\x00\x00\x00\x00\xe8\x07\x00\x00\x00\x00\x00\x00\xe8\x07\x00\x00\x00\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x51\xe5\x74\x64\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x52\xe5\x74\x64\x04\x00\x00\x00\x90\xfd\x00\x00\x00\x00\x00\x00\x90\xfd\x01\x00\x00\x00\x00\x00\x90\xfd\x01\x00\x00\x00\x00\x00\x70\x02\x00\x00\x00\x00\x00\x00\x70\x02\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x2f\x6c\x69\x62\x2f\x6c\x64\x2d\x6c\x69\x6e\x75\x78\x2d\x61\x61\x72\x63\x68\x36\x34\x2e\x73\x6f\x2e\x31\x00\x00\x04\x00\x00\x00\x14\x00\x00\x00\x03\x00\x00\x00\x47\x4e\x55\x00\x56\x39\x03\xc2\xe1\x5e\x80\x94\xce\xd1\x56\x3a\x67\x2f\x13\x1f\x3c\x67\x44\x5e\x04\x00\x00\x00\x10\x00\x00\x00\x01\x00\x00\x00\x47\x4e\x55\x00\x00\x00\x00\x00\x03\x00\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x0b\x00\xb8\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x16\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x4f\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1a\x00\x00\x00\x22\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x6b\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
29\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7a\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x6d\x61\x6c\x6c\x6f\x63\x00\x5f\x5f\x6c\x69\x62\x63\x5f\x73\x74\x61\x72\x74\x5f\x6d\x61\x69\x6e\x00\x5f\x5f\x63\x78\x61\x5f\x66\x69\x6e\x61\x6c\x69\x7a\x65\x00\x61\x62\x6f\x72\x74\x00\x6c\x69\x62\x63\x2e\x73\x6f\x2e\x36\x00\x47\x4c\x49\x42\x43\x5f\x32\x2e\x31\x37\x00\x47\x4c\x49\x42\x43\x5f\x32\x2e\x33\x34\x00\x5f\x49\x54\x4d\x5f\x64\x65\x72\x65\x67\x69\x73\x74\x65\x72\x54\x4d\x43\x6c\x6f\x6e\x65\x54\x61\x62\x6c\x65\x00\x5f\x5f\x67\x6d\x6f\x6e\x5f\x73\x74\x61\x72\x74\x5f\x5f\x00\x5f\x49\x54\x4d\x5f\x72\x65\x67\x69\x73\x74\x65\x72\x54\x4d\x43\x6c\x6f\x6e\x65\x54\x61\x62\x6c\x65\x00\x00\x00\x00\x00\x00\x00\x02\x00\x01\x00\x03\x00\x03\x00\x01\x00\x03\x00\x01\x00\x01\x00\x02\x00\x2f\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x97\x91\x96\x06\x00\x00\x03\x00\x39\x00\x00\x00\x10\x00\x00\x00\xb4\x91\x96\x06\x00\x00\x02\x00\x44\x00\x00\x00\x00\x00\x00\x00\x90\xfd\x01\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x50\x07\x00\x00\x00\x00\x00\x00\x98\xfd\x01\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\x00\x00\x00\xf0\xff\x01\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x74\x07\x00\x00\x00\x00\x00\x00\x08\x00\x02\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x08\x00\x02\x00\x00\x00\x00\x00\xd8\xff\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe0\xff\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe8\xff\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf8\xff\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa8\xff\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb0\xff\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb8\xff\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc0\xff\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc8\xff\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f\x20\x03\xd5\xfd\x7b\xbf\xa9\xfd\x03\x00\x91\x2c\x00\x00\x94\xfd\x7b\xc1\xa8\xc0\x03\x5f\xd6\xf0\x7b\xbf\xa9\xf0\x00\x00\xf0\x11\xd2\x47\xf9\x10\x82\x3e\x91\x20\x02\x1f\xd6\x1f\x20\x03\xd5\x1f\x20\x03\xd5\x1f\x20\x03\xd5\xf0\x00\x00\xf0\x11\xd6\x47\xf9\x10\xa2\x3e\x91\x20\x02\x1f\xd6\xf0\x00\x00\xf0\x11\xda\x47\xf9\x10\xc2\x3e\x91\x20\x02\x1f\xd6\xf0\x00\x00\xf0\x11\xde\x47\xf9\x10\xe2\x3e\x91\x20\x02\x1f\xd6\xf0\x00\x00\xf0\x11\xe2\x47\xf9\x10\x02\x3f\x91\x20\x02\x1f\xd6\xf0\x00\x00\xf0\x11\xe6\x47\xf9\x10\x22\x3f\x91\x20\x02\x1f\xd6\x1f\x20\x03\xd5\x1d\x00\x80\xd2\x1e\x00\x80\xd2\xe5\x03\x00\xaa\xe1\x03\x40\xf9\xe2\x23\x00\x91\xe6\x03\x00\x91\xe0\x00\x00\xf0\x00\xf8\x47\xf9\x03\x00\x80\xd2\x04\x00\x80\xd2\xe1\xff\xff\x97\xf0\xff\xff\x97\xe0\x00\x00\xf0\x00\xf4\x47\xf9\x40\x00\x00\xb4\xe8\xff\xff\x17\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\x1f\x20\x03\xd5\x00\x01\x00\x90\x00\x40\x00\x91\x01\x01\x00\x90\x21\x40\x00\x91\x3f\x00\x00\xeb\xc0\x00\x00\x54\xe1\x00\x00\xf0\x21\xec\x47\xf9\x61\x00\x00\xb4\xf0\x03\x01\xaa\x00\x02\x1f\xd6\xc0\x03\x5f\xd6\x00\x01\x00\x90\x00\x40\x00\x91\x01\x01\x00\x90\x21\x40\x00\x91\x21\x00\x00\xcb\x22\xfc\x7f\xd3\x41\x0c\x81\x8b\x21\xfc\x41\x93\xc1\x00\x00\xb4\xe2\x00\x00\xf0\x42\xfc\x47\xf9\x62\x00\x00\xb4\xf0\
x03\x02\xaa\x00\x02\x1f\xd6\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\xfd\x7b\xbe\xa9\xfd\x03\x00\x91\xf3\x0b\x00\xf9\x13\x01\x00\x90\x60\x42\x40\x39\x40\x01\x00\x35\xe0\x00\x00\xf0\x00\xf0\x47\xf9\x80\x00\x00\xb4\x00\x01\x00\x90\x00\x04\x40\xf9\xb5\xff\xff\x97\xd8\xff\xff\x97\x20\x00\x80\x52\x60\x42\x00\x39\xf3\x0b\x40\xf9\xfd\x7b\xc2\xa8\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\x1f\x20\x03\xd5\xdc\xff\xff\x17\xff\x83\x00\xd1\xe0\x07\x00\xf9\xe0\x07\x40\xf9\x00\x40\x00\x91\xe0\x0f\x00\xf9\xe0\x0f\x40\xf9\xff\x83\x00\x91\xc0\x03\x5f\xd6\xfd\x7b\xbd\xa9\xfd\x03\x00\x91\x80\x02\x80\xd2\xa4\xff\xff\x97\xe0\x0f\x00\xf9\xe0\x0f\x40\xf9\x81\x01\x80\x52\x01\x00\x00\xb9\xe0\x0f\x40\xf9\x00\x40\x00\x91\xe0\x13\x00\xf9\xe0\x13\x40\xf9\xa1\x01\x80\x52\x01\x00\x00\xb9\xe0\x13\x40\xf9\xe9\xff\xff\x97\xe0\x17\x00\xf9\xe0\x17\x40\xf9\xc1\x01\x80\x52\x01\x00\x00\xb9\x00\x00\x80\x52\xfd\x7b\xc3\xa8\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\xfd\x7b\xbf\xa9\xfd\x03\x00\x91\xfd\x7b\xc1\xa8\xc0\x03\x5f\xd6\x01\x00\x02\x00\x01\x1b\x03\x3b\x44\x00\x00\x00\x07\x00\x00\x00\x58\xfe\xff\xff\x5c\x00\x00\x00\xa8\xfe\xff\xff\x70\x00\x00\x00\xd8\xfe\xff\xff\x84\x00\x00\x00\x18\xff\xff\xff\x98\x00\x00\x00\x68\xff\xff\xff\xbc\x00\x00\x00\x6c\xff\xff\xff\xd0\x00\x00\x00\x8c\xff\xff\xff\xe8\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x01\x7a\x52\x00\x04\x78\x1e\x01"), +Section(".comment", 0x0, "\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\xb7\x00\x01\x00\x00\x00\x40\x06\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\xc8\x0b\x01"), +Section(".interp", 0x238, "\x2f\x6c\x69\x62\x2f\x6c\x64\x2d\x6c\x69\x6e\x75\x78\x2d\x61\x61\x72\x63\x68\x36\x34\x2e\x73\x6f\x2e\x31\x00"), +Section(".note.gnu.build-id", 0x254, "\x04\x00\x00\x00\x14\x00\x00\x00\x03\x00\x00\x00\x47\x4e\x55\x00\x56\x39\x03\xc2\xe1\x5e\x80\x94\xce\xd1\x56\x3a\x67\x2f\x13\x1f\x3c\x67\x44\x5e"), +Section(".note.ABI-tag", 0x278, "\x04\x00\x00\x00\x10\x00\x00\x00\x01\x00\x00\x00\x47\x4e\x55\x00\x00\x00\x00\x00\x03\x00\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00"), +Section(".gnu.hash", 0x298, "\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), +Section(".dynsym", 0x2B8, "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x0b\x00\xb8\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x16\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x4f\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1a\x00\x00\x00\x22\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x6b\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x29\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7a\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), +Section(".dynstr", 0x3A8, 
"\x00\x6d\x61\x6c\x6c\x6f\x63\x00\x5f\x5f\x6c\x69\x62\x63\x5f\x73\x74\x61\x72\x74\x5f\x6d\x61\x69\x6e\x00\x5f\x5f\x63\x78\x61\x5f\x66\x69\x6e\x61\x6c\x69\x7a\x65\x00\x61\x62\x6f\x72\x74\x00\x6c\x69\x62\x63\x2e\x73\x6f\x2e\x36\x00\x47\x4c\x49\x42\x43\x5f\x32\x2e\x31\x37\x00\x47\x4c\x49\x42\x43\x5f\x32\x2e\x33\x34\x00\x5f\x49\x54\x4d\x5f\x64\x65\x72\x65\x67\x69\x73\x74\x65\x72\x54\x4d\x43\x6c\x6f\x6e\x65\x54\x61\x62\x6c\x65\x00\x5f\x5f\x67\x6d\x6f\x6e\x5f\x73\x74\x61\x72\x74\x5f\x5f\x00\x5f\x49\x54\x4d\x5f\x72\x65\x67\x69\x73\x74\x65\x72\x54\x4d\x43\x6c\x6f\x6e\x65\x54\x61\x62\x6c\x65\x00"), +Section(".gnu.version", 0x43C, "\x00\x00\x00\x00\x00\x00\x02\x00\x01\x00\x03\x00\x03\x00\x01\x00\x03\x00\x01\x00"), +Section(".gnu.version_r", 0x450, "\x01\x00\x02\x00\x2f\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x97\x91\x96\x06\x00\x00\x03\x00\x39\x00\x00\x00\x10\x00\x00\x00\xb4\x91\x96\x06\x00\x00\x02\x00\x44\x00\x00\x00\x00\x00\x00\x00"), +Section(".rela.dyn", 0x480, "\x90\xfd\x01\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x50\x07\x00\x00\x00\x00\x00\x00\x98\xfd\x01\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\x00\x00\x00\xf0\xff\x01\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x74\x07\x00\x00\x00\x00\x00\x00\x08\x00\x02\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x08\x00\x02\x00\x00\x00\x00\x00\xd8\xff\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe0\xff\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe8\xff\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf8\xff\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), +Section(".rela.plt", 0x540, "\xa8\xff\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb0\xff\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb8\xff\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc0\xff\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc8\xff\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), +Section(".init", 0x5B8, "\x1f\x20\x03\xd5\xfd\x7b\xbf\xa9\xfd\x03\x00\x91\x2c\x00\x00\x94\xfd\x7b\xc1\xa8\xc0\x03\x5f\xd6"), +Section(".plt", 0x5D0, "\xf0\x7b\xbf\xa9\xf0\x00\x00\xf0\x11\xd2\x47\xf9\x10\x82\x3e\x91\x20\x02\x1f\xd6\x1f\x20\x03\xd5\x1f\x20\x03\xd5\x1f\x20\x03\xd5\xf0\x00\x00\xf0\x11\xd6\x47\xf9\x10\xa2\x3e\x91\x20\x02\x1f\xd6\xf0\x00\x00\xf0\x11\xda\x47\xf9\x10\xc2\x3e\x91\x20\x02\x1f\xd6\xf0\x00\x00\xf0\x11\xde\x47\xf9\x10\xe2\x3e\x91\x20\x02\x1f\xd6\xf0\x00\x00\xf0\x11\xe2\x47\xf9\x10\x02\x3f\x91\x20\x02\x1f\xd6\xf0\x00\x00\xf0\x11\xe6\x47\xf9\x10\x22\x3f\x91\x20\x02\x1f\xd6"), +Section(".text", 0x640, 
"\x1f\x20\x03\xd5\x1d\x00\x80\xd2\x1e\x00\x80\xd2\xe5\x03\x00\xaa\xe1\x03\x40\xf9\xe2\x23\x00\x91\xe6\x03\x00\x91\xe0\x00\x00\xf0\x00\xf8\x47\xf9\x03\x00\x80\xd2\x04\x00\x80\xd2\xe1\xff\xff\x97\xf0\xff\xff\x97\xe0\x00\x00\xf0\x00\xf4\x47\xf9\x40\x00\x00\xb4\xe8\xff\xff\x17\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\x1f\x20\x03\xd5\x00\x01\x00\x90\x00\x40\x00\x91\x01\x01\x00\x90\x21\x40\x00\x91\x3f\x00\x00\xeb\xc0\x00\x00\x54\xe1\x00\x00\xf0\x21\xec\x47\xf9\x61\x00\x00\xb4\xf0\x03\x01\xaa\x00\x02\x1f\xd6\xc0\x03\x5f\xd6\x00\x01\x00\x90\x00\x40\x00\x91\x01\x01\x00\x90\x21\x40\x00\x91\x21\x00\x00\xcb\x22\xfc\x7f\xd3\x41\x0c\x81\x8b\x21\xfc\x41\x93\xc1\x00\x00\xb4\xe2\x00\x00\xf0\x42\xfc\x47\xf9\x62\x00\x00\xb4\xf0\x03\x02\xaa\x00\x02\x1f\xd6\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\xfd\x7b\xbe\xa9\xfd\x03\x00\x91\xf3\x0b\x00\xf9\x13\x01\x00\x90\x60\x42\x40\x39\x40\x01\x00\x35\xe0\x00\x00\xf0\x00\xf0\x47\xf9\x80\x00\x00\xb4\x00\x01\x00\x90\x00\x04\x40\xf9\xb5\xff\xff\x97\xd8\xff\xff\x97\x20\x00\x80\x52\x60\x42\x00\x39\xf3\x0b\x40\xf9\xfd\x7b\xc2\xa8\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\x1f\x20\x03\xd5\xdc\xff\xff\x17\xff\x83\x00\xd1\xe0\x07\x00\xf9\xe0\x07\x40\xf9\x00\x40\x00\x91\xe0\x0f\x00\xf9\xe0\x0f\x40\xf9\xff\x83\x00\x91\xc0\x03\x5f\xd6\xfd\x7b\xbd\xa9\xfd\x03\x00\x91\x80\x02\x80\xd2\xa4\xff\xff\x97\xe0\x0f\x00\xf9\xe0\x0f\x40\xf9\x81\x01\x80\x52\x01\x00\x00\xb9\xe0\x0f\x40\xf9\x00\x40\x00\x91\xe0\x13\x00\xf9\xe0\x13\x40\xf9\xa1\x01\x80\x52\x01\x00\x00\xb9\xe0\x13\x40\xf9\xe9\xff\xff\x97\xe0\x17\x00\xf9\xe0\x17\x40\xf9\xc1\x01\x80\x52\x01\x00\x00\xb9\x00\x00\x80\x52\xfd\x7b\xc3\xa8\xc0\x03\x5f\xd6"), +Section(".fini", 0x7D0, "\x1f\x20\x03\xd5\xfd\x7b\xbf\xa9\xfd\x03\x00\x91\xfd\x7b\xc1\xa8\xc0\x03\x5f\xd6"), +Section(".rodata", 0x7E4, "\x01\x00\x02\x00"), +Section(".eh_frame_hdr", 0x7E8, "\x01\x1b\x03\x3b\x44\x00\x00\x00\x07\x00\x00\x00\x58\xfe\xff\xff\x5c\x00\x00\x00\xa8\xfe\xff\xff\x70\x00\x00\x00\xd8\xfe\xff\xff\x84\x00\x00\x00\x18\xff\xff\xff\x98\x00\x00\x00\x68\xff\xff\xff\xbc\x00\x00\x00\x6c\xff\xff\xff\xd0\x00\x00\x00\x8c\xff\xff\xff\xe8\x00\x00\x00"), +Section(".eh_frame", 0x830, "\x10\x00\x00\x00\x00\x00\x00\x00\x01\x7a\x52\x00\x04\x78\x1e\x01\x1b\x0c\x1f\x00\x10\x00\x00\x00\x18\x00\x00\x00\xf4\xfd\xff\xff\x34\x00\x00\x00\x00\x41\x07\x1e\x10\x00\x00\x00\x2c\x00\x00\x00\x30\xfe\xff\xff\x30\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x40\x00\x00\x00\x4c\xfe\xff\xff\x3c\x00\x00\x00\x00\x00\x00\x00\x20\x00\x00\x00\x54\x00\x00\x00\x78\xfe\xff\xff\x48\x00\x00\x00\x00\x41\x0e\x20\x9d\x04\x9e\x03\x42\x93\x02\x4e\xde\xdd\xd3\x0e\x00\x00\x00\x00\x10\x00\x00\x00\x78\x00\x00\x00\xa4\xfe\xff\xff\x04\x00\x00\x00\x00\x00\x00\x00\x14\x00\x00\x00\x8c\x00\x00\x00\x94\xfe\xff\xff\x20\x00\x00\x00\x00\x41\x0e\x20\x46\x0e\x00\x00\x1c\x00\x00\x00\xa4\x00\x00\x00\x9c\xfe\xff\xff\x5c\x00\x00\x00\x00\x41\x0e\x30\x9d\x06\x9e\x05\x55\xde\xdd\x0e\x00\x00\x00\x00\x00\x00\x00\x00"), +Section(".fini_array", 0x1FD98, "\x00\x07\x00\x00\x00\x00\x00\x00"), +Section(".dynamic", 0x1FDA0, 
"\x01\x00\x00\x00\x00\x00\x00\x00\x2f\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00\xb8\x05\x00\x00\x00\x00\x00\x00\x0d\x00\x00\x00\x00\x00\x00\x00\xd0\x07\x00\x00\x00\x00\x00\x00\x19\x00\x00\x00\x00\x00\x00\x00\x90\xfd\x01\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x98\xfd\x01\x00\x00\x00\x00\x00\x1c\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\xf5\xfe\xff\x6f\x00\x00\x00\x00\x98\x02\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\xa8\x03\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\xb8\x02\x00\x00\x00\x00\x00\x00\x0a\x00\x00\x00\x00\x00\x00\x00\x94\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x00\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x15\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x90\xff\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x78\x00\x00\x00\x00\x00\x00\x00\x14\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x17\x00\x00\x00\x00\x00\x00\x00\x40\x05\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x80\x04\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\xc0\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\xfb\xff\xff\x6f\x00\x00\x00\x00\x01\x00\x00\x08\x00\x00\x00\x00\xfe\xff\xff\x6f\x00\x00\x00\x00\x50\x04\x00\x00\x00\x00\x00\x00\xff\xff\xff\x6f\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xf0\xff\xff\x6f\x00\x00\x00\x00\x3c\x04\x00\x00\x00\x00\x00\x00\xf9\xff\xff\x6f\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), +Section(".got", 0x1FF90, "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd0\x05\x00\x00\x00\x00\x00\x00\xd0\x05\x00\x00\x00\x00\x00\x00\xd0\x05\x00\x00\x00\x00\x00\x00\xd0\x05\x00\x00\x00\x00\x00\x00\xd0\x05\x00\x00\x00\x00\x00\x00\xa0\xfd\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x74\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), +Section(".data", 0x20000, "\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x02\x00\x00\x00\x00\x00"), +Section(".init_array", 0x1FD90, "\x50\x07\x00\x00\x00\x00\x00\x00")]), +Memmap([Annotation(Region(0x0,0x8F3), Attr("segment","02 0 2292")), +Annotation(Region(0x640,0x673), Attr("symbol","_start")), +Annotation(Region(0x0,0xF9), Attr("section",".shstrtab")), +Annotation(Region(0x0,0x24C), Attr("section",".strtab")), +Annotation(Region(0x0,0x83F), Attr("section",".symtab")), +Annotation(Region(0x0,0x2A), Attr("section",".comment")), +Annotation(Region(0x238,0x252), Attr("section",".interp")), +Annotation(Region(0x254,0x277), Attr("section",".note.gnu.build-id")), +Annotation(Region(0x278,0x297), Attr("section",".note.ABI-tag")), +Annotation(Region(0x298,0x2B3), Attr("section",".gnu.hash")), +Annotation(Region(0x2B8,0x3A7), Attr("section",".dynsym")), +Annotation(Region(0x3A8,0x43B), Attr("section",".dynstr")), +Annotation(Region(0x43C,0x44F), Attr("section",".gnu.version")), +Annotation(Region(0x450,0x47F), Attr("section",".gnu.version_r")), +Annotation(Region(0x480,0x53F), 
Attr("section",".rela.dyn")), +Annotation(Region(0x540,0x5B7), Attr("section",".rela.plt")), +Annotation(Region(0x5B8,0x5CF), Attr("section",".init")), +Annotation(Region(0x5D0,0x63F), Attr("section",".plt")), +Annotation(Region(0x5B8,0x5CF), Attr("code-region","()")), +Annotation(Region(0x5D0,0x63F), Attr("code-region","()")), +Annotation(Region(0x640,0x673), Attr("symbol-info","_start 0x640 52")), +Annotation(Region(0x674,0x687), Attr("symbol","call_weak_fn")), +Annotation(Region(0x674,0x687), Attr("symbol-info","call_weak_fn 0x674 20")), +Annotation(Region(0x754,0x773), Attr("symbol","callee")), +Annotation(Region(0x640,0x7CF), Attr("section",".text")), +Annotation(Region(0x640,0x7CF), Attr("code-region","()")), +Annotation(Region(0x754,0x773), Attr("symbol-info","callee 0x754 32")), +Annotation(Region(0x774,0x7CF), Attr("symbol","main")), +Annotation(Region(0x774,0x7CF), Attr("symbol-info","main 0x774 92")), +Annotation(Region(0x7D0,0x7E3), Attr("section",".fini")), +Annotation(Region(0x7D0,0x7E3), Attr("code-region","()")), +Annotation(Region(0x7E4,0x7E7), Attr("section",".rodata")), +Annotation(Region(0x7E8,0x82B), Attr("section",".eh_frame_hdr")), +Annotation(Region(0x830,0x8F3), Attr("section",".eh_frame")), +Annotation(Region(0x1FD90,0x2000F), Attr("segment","03 0x1FD90 648")), +Annotation(Region(0x1FD98,0x1FD9F), Attr("section",".fini_array")), +Annotation(Region(0x1FDA0,0x1FF8F), Attr("section",".dynamic")), +Annotation(Region(0x1FF90,0x1FFFF), Attr("section",".got")), +Annotation(Region(0x20000,0x2000F), Attr("section",".data")), +Annotation(Region(0x1FD90,0x1FD97), Attr("section",".init_array"))]), +Program(Tid(1_844, "%00000734"), Attrs([]), + Subs([Sub(Tid(1_789, "@__cxa_finalize"), Attrs([Attr("address","0x600"), +Attr("stub","()"), Attr("c.proto","signed (*)(void)")]), "__cxa_finalize", + Args([Arg(Tid(1_845, "%00000735"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("__cxa_finalize_result",Imm(32)), LOW(32,Var("R0",Imm(64))), Out())]), +Blks([Blk(Tid(1_113, "@__cxa_finalize"), Attrs([Attr("address","0x600")]), + Phis([]), Defs([Def(Tid(1_377, "%00000561"), Attrs([Attr("address","0x600"), +Attr("insn","adrp x16, #126976")]), Var("R16",Imm(64)), Int(126976,64)), +Def(Tid(1_384, "%00000568"), Attrs([Attr("address","0x604"), +Attr("insn","ldr x17, [x16, #0xfb0]")]), Var("R17",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R16",Imm(64)),Int(4016,64)),LittleEndian(),64)), +Def(Tid(1_390, "%0000056e"), Attrs([Attr("address","0x608"), +Attr("insn","add x16, x16, #0xfb0")]), Var("R16",Imm(64)), +PLUS(Var("R16",Imm(64)),Int(4016,64)))]), Jmps([Call(Tid(1_395, "%00000573"), + Attrs([Attr("address","0x60C"), Attr("insn","br x17")]), Int(1,1), +(Indirect(Var("R17",Imm(64))),))]))])), +Sub(Tid(1_790, "@__do_global_dtors_aux"), Attrs([Attr("address","0x700"), +Attr("c.proto","signed (*)(void)")]), "__do_global_dtors_aux", + Args([Arg(Tid(1_846, "%00000736"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("__do_global_dtors_aux_result",Imm(32)), LOW(32,Var("R0",Imm(64))), +Out())]), Blks([Blk(Tid(735, "@__do_global_dtors_aux"), + Attrs([Attr("address","0x700")]), Phis([]), Defs([Def(Tid(739, "%000002e3"), + Attrs([Attr("address","0x700"), +Attr("insn","stp x29, x30, [sp, #-0x20]!")]), Var("#3",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(18446744073709551584,64))), +Def(Tid(745, "%000002e9"), Attrs([Attr("address","0x700"), +Attr("insn","stp x29, x30, [sp, #-0x20]!")]), 
Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("#3",Imm(64)),Var("R29",Imm(64)),LittleEndian(),64)), +Def(Tid(751, "%000002ef"), Attrs([Attr("address","0x700"), +Attr("insn","stp x29, x30, [sp, #-0x20]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("#3",Imm(64)),Int(8,64)),Var("R30",Imm(64)),LittleEndian(),64)), +Def(Tid(755, "%000002f3"), Attrs([Attr("address","0x700"), +Attr("insn","stp x29, x30, [sp, #-0x20]!")]), Var("R31",Imm(64)), +Var("#3",Imm(64))), Def(Tid(761, "%000002f9"), + Attrs([Attr("address","0x704"), Attr("insn","mov x29, sp")]), + Var("R29",Imm(64)), Var("R31",Imm(64))), Def(Tid(769, "%00000301"), + Attrs([Attr("address","0x708"), Attr("insn","str x19, [sp, #0x10]")]), + Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(16,64)),Var("R19",Imm(64)),LittleEndian(),64)), +Def(Tid(774, "%00000306"), Attrs([Attr("address","0x70C"), +Attr("insn","adrp x19, #131072")]), Var("R19",Imm(64)), Int(131072,64)), +Def(Tid(781, "%0000030d"), Attrs([Attr("address","0x710"), +Attr("insn","ldrb w0, [x19, #0x10]")]), Var("R0",Imm(64)), +UNSIGNED(64,Load(Var("mem",Mem(64,8)),PLUS(Var("R19",Imm(64)),Int(16,64)),LittleEndian(),8)))]), +Jmps([Goto(Tid(788, "%00000314"), Attrs([Attr("address","0x714"), +Attr("insn","cbnz w0, #0x28")]), + NEQ(Extract(31,0,Var("R0",Imm(64))),Int(0,32)), +Direct(Tid(786, "%00000312"))), Goto(Tid(1_834, "%0000072a"), Attrs([]), + Int(1,1), Direct(Tid(1_058, "%00000422")))])), Blk(Tid(1_058, "%00000422"), + Attrs([Attr("address","0x718")]), Phis([]), +Defs([Def(Tid(1_061, "%00000425"), Attrs([Attr("address","0x718"), +Attr("insn","adrp x0, #126976")]), Var("R0",Imm(64)), Int(126976,64)), +Def(Tid(1_068, "%0000042c"), Attrs([Attr("address","0x71C"), +Attr("insn","ldr x0, [x0, #0xfe0]")]), Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R0",Imm(64)),Int(4064,64)),LittleEndian(),64))]), +Jmps([Goto(Tid(1_074, "%00000432"), Attrs([Attr("address","0x720"), +Attr("insn","cbz x0, #0x10")]), EQ(Var("R0",Imm(64)),Int(0,64)), +Direct(Tid(1_072, "%00000430"))), Goto(Tid(1_835, "%0000072b"), Attrs([]), + Int(1,1), Direct(Tid(1_097, "%00000449")))])), Blk(Tid(1_097, "%00000449"), + Attrs([Attr("address","0x724")]), Phis([]), +Defs([Def(Tid(1_100, "%0000044c"), Attrs([Attr("address","0x724"), +Attr("insn","adrp x0, #131072")]), Var("R0",Imm(64)), Int(131072,64)), +Def(Tid(1_107, "%00000453"), Attrs([Attr("address","0x728"), +Attr("insn","ldr x0, [x0, #0x8]")]), Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R0",Imm(64)),Int(8,64)),LittleEndian(),64)), +Def(Tid(1_112, "%00000458"), Attrs([Attr("address","0x72C"), +Attr("insn","bl #-0x12c")]), Var("R30",Imm(64)), Int(1840,64))]), +Jmps([Call(Tid(1_115, "%0000045b"), Attrs([Attr("address","0x72C"), +Attr("insn","bl #-0x12c")]), Int(1,1), +(Direct(Tid(1_789, "@__cxa_finalize")),Direct(Tid(1_072, "%00000430"))))])), +Blk(Tid(1_072, "%00000430"), Attrs([Attr("address","0x730")]), Phis([]), +Defs([Def(Tid(1_080, "%00000438"), Attrs([Attr("address","0x730"), +Attr("insn","bl #-0xa0")]), Var("R30",Imm(64)), Int(1844,64))]), +Jmps([Call(Tid(1_082, "%0000043a"), Attrs([Attr("address","0x730"), +Attr("insn","bl #-0xa0")]), Int(1,1), +(Direct(Tid(1_804, "@deregister_tm_clones")),Direct(Tid(1_084, "%0000043c"))))])), +Blk(Tid(1_084, "%0000043c"), Attrs([Attr("address","0x734")]), Phis([]), +Defs([Def(Tid(1_087, "%0000043f"), Attrs([Attr("address","0x734"), +Attr("insn","mov w0, #0x1")]), Var("R0",Imm(64)), Int(1,64)), +Def(Tid(1_095, "%00000447"), 
Attrs([Attr("address","0x738"), +Attr("insn","strb w0, [x19, #0x10]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("R19",Imm(64)),Int(16,64)),Extract(7,0,Var("R0",Imm(64))),LittleEndian(),8))]), +Jmps([Goto(Tid(1_836, "%0000072c"), Attrs([]), Int(1,1), +Direct(Tid(786, "%00000312")))])), Blk(Tid(786, "%00000312"), + Attrs([Attr("address","0x73C")]), Phis([]), Defs([Def(Tid(796, "%0000031c"), + Attrs([Attr("address","0x73C"), Attr("insn","ldr x19, [sp, #0x10]")]), + Var("R19",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(16,64)),LittleEndian(),64)), +Def(Tid(803, "%00000323"), Attrs([Attr("address","0x740"), +Attr("insn","ldp x29, x30, [sp], #0x20")]), Var("R29",Imm(64)), +Load(Var("mem",Mem(64,8)),Var("R31",Imm(64)),LittleEndian(),64)), +Def(Tid(808, "%00000328"), Attrs([Attr("address","0x740"), +Attr("insn","ldp x29, x30, [sp], #0x20")]), Var("R30",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(8,64)),LittleEndian(),64)), +Def(Tid(812, "%0000032c"), Attrs([Attr("address","0x740"), +Attr("insn","ldp x29, x30, [sp], #0x20")]), Var("R31",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(32,64)))]), Jmps([Call(Tid(817, "%00000331"), + Attrs([Attr("address","0x744"), Attr("insn","ret")]), Int(1,1), +(Indirect(Var("R30",Imm(64))),))]))])), Sub(Tid(1_794, "@__libc_start_main"), + Attrs([Attr("address","0x5F0"), Attr("stub","()"), +Attr("c.proto","signed (*)(signed (*)(signed , char** , char** );* main, signed , char** , \nvoid* auxv)")]), + "__libc_start_main", Args([Arg(Tid(1_847, "%00000737"), + Attrs([Attr("c.data","Top:u64 ptr ptr"), +Attr("c.layout","**[ : 64]"), +Attr("c.type","signed (*)(signed , char** , char** );*")]), + Var("__libc_start_main_main",Imm(64)), Var("R0",Imm(64)), In()), +Arg(Tid(1_848, "%00000738"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("__libc_start_main_arg2",Imm(32)), LOW(32,Var("R1",Imm(64))), In()), +Arg(Tid(1_849, "%00000739"), Attrs([Attr("c.data","Top:u8 ptr ptr"), +Attr("c.layout","**[char : 8]"), Attr("c.type","char**")]), + Var("__libc_start_main_arg3",Imm(64)), Var("R2",Imm(64)), Both()), +Arg(Tid(1_850, "%0000073a"), Attrs([Attr("c.data","{} ptr"), +Attr("c.layout","*[ : 8]"), Attr("c.type","void*")]), + Var("__libc_start_main_auxv",Imm(64)), Var("R3",Imm(64)), Both()), +Arg(Tid(1_851, "%0000073b"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("__libc_start_main_result",Imm(32)), LOW(32,Var("R0",Imm(64))), +Out())]), Blks([Blk(Tid(568, "@__libc_start_main"), + Attrs([Attr("address","0x5F0")]), Phis([]), +Defs([Def(Tid(1_355, "%0000054b"), Attrs([Attr("address","0x5F0"), +Attr("insn","adrp x16, #126976")]), Var("R16",Imm(64)), Int(126976,64)), +Def(Tid(1_362, "%00000552"), Attrs([Attr("address","0x5F4"), +Attr("insn","ldr x17, [x16, #0xfa8]")]), Var("R17",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R16",Imm(64)),Int(4008,64)),LittleEndian(),64)), +Def(Tid(1_368, "%00000558"), Attrs([Attr("address","0x5F8"), +Attr("insn","add x16, x16, #0xfa8")]), Var("R16",Imm(64)), +PLUS(Var("R16",Imm(64)),Int(4008,64)))]), Jmps([Call(Tid(1_373, "%0000055d"), + Attrs([Attr("address","0x5FC"), Attr("insn","br x17")]), Int(1,1), +(Indirect(Var("R17",Imm(64))),))]))])), Sub(Tid(1_795, "@_fini"), + Attrs([Attr("address","0x7D0"), Attr("c.proto","signed (*)(void)")]), + "_fini", Args([Arg(Tid(1_852, "%0000073c"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + 
Var("_fini_result",Imm(32)), LOW(32,Var("R0",Imm(64))), Out())]), +Blks([Blk(Tid(52, "@_fini"), Attrs([Attr("address","0x7D0")]), Phis([]), +Defs([Def(Tid(58, "%0000003a"), Attrs([Attr("address","0x7D4"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("#0",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(18446744073709551600,64))), +Def(Tid(64, "%00000040"), Attrs([Attr("address","0x7D4"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("#0",Imm(64)),Var("R29",Imm(64)),LittleEndian(),64)), +Def(Tid(70, "%00000046"), Attrs([Attr("address","0x7D4"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("#0",Imm(64)),Int(8,64)),Var("R30",Imm(64)),LittleEndian(),64)), +Def(Tid(74, "%0000004a"), Attrs([Attr("address","0x7D4"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("R31",Imm(64)), +Var("#0",Imm(64))), Def(Tid(80, "%00000050"), Attrs([Attr("address","0x7D8"), +Attr("insn","mov x29, sp")]), Var("R29",Imm(64)), Var("R31",Imm(64))), +Def(Tid(87, "%00000057"), Attrs([Attr("address","0x7DC"), +Attr("insn","ldp x29, x30, [sp], #0x10")]), Var("R29",Imm(64)), +Load(Var("mem",Mem(64,8)),Var("R31",Imm(64)),LittleEndian(),64)), +Def(Tid(92, "%0000005c"), Attrs([Attr("address","0x7DC"), +Attr("insn","ldp x29, x30, [sp], #0x10")]), Var("R30",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(8,64)),LittleEndian(),64)), +Def(Tid(96, "%00000060"), Attrs([Attr("address","0x7DC"), +Attr("insn","ldp x29, x30, [sp], #0x10")]), Var("R31",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(16,64)))]), Jmps([Call(Tid(101, "%00000065"), + Attrs([Attr("address","0x7E0"), Attr("insn","ret")]), Int(1,1), +(Indirect(Var("R30",Imm(64))),))]))])), Sub(Tid(1_796, "@_init"), + Attrs([Attr("address","0x5B8"), Attr("c.proto","signed (*)(void)")]), + "_init", Args([Arg(Tid(1_853, "%0000073d"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("_init_result",Imm(32)), LOW(32,Var("R0",Imm(64))), Out())]), +Blks([Blk(Tid(1_596, "@_init"), Attrs([Attr("address","0x5B8")]), Phis([]), +Defs([Def(Tid(1_602, "%00000642"), Attrs([Attr("address","0x5BC"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("#6",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(18446744073709551600,64))), +Def(Tid(1_608, "%00000648"), Attrs([Attr("address","0x5BC"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("#6",Imm(64)),Var("R29",Imm(64)),LittleEndian(),64)), +Def(Tid(1_614, "%0000064e"), Attrs([Attr("address","0x5BC"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("#6",Imm(64)),Int(8,64)),Var("R30",Imm(64)),LittleEndian(),64)), +Def(Tid(1_618, "%00000652"), Attrs([Attr("address","0x5BC"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("R31",Imm(64)), +Var("#6",Imm(64))), Def(Tid(1_624, "%00000658"), + Attrs([Attr("address","0x5C0"), Attr("insn","mov x29, sp")]), + Var("R29",Imm(64)), Var("R31",Imm(64))), Def(Tid(1_629, "%0000065d"), + Attrs([Attr("address","0x5C4"), Attr("insn","bl #0xb0")]), + Var("R30",Imm(64)), Int(1480,64))]), Jmps([Call(Tid(1_631, "%0000065f"), + Attrs([Attr("address","0x5C4"), Attr("insn","bl #0xb0")]), Int(1,1), +(Direct(Tid(1_801, "@call_weak_fn")),Direct(Tid(1_633, "%00000661"))))])), +Blk(Tid(1_633, "%00000661"), Attrs([Attr("address","0x5C8")]), Phis([]), +Defs([Def(Tid(1_638, "%00000666"), Attrs([Attr("address","0x5C8"), +Attr("insn","ldp x29, x30, [sp], 
#0x10")]), Var("R29",Imm(64)), +Load(Var("mem",Mem(64,8)),Var("R31",Imm(64)),LittleEndian(),64)), +Def(Tid(1_643, "%0000066b"), Attrs([Attr("address","0x5C8"), +Attr("insn","ldp x29, x30, [sp], #0x10")]), Var("R30",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(8,64)),LittleEndian(),64)), +Def(Tid(1_647, "%0000066f"), Attrs([Attr("address","0x5C8"), +Attr("insn","ldp x29, x30, [sp], #0x10")]), Var("R31",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(16,64)))]), Jmps([Call(Tid(1_652, "%00000674"), + Attrs([Attr("address","0x5CC"), Attr("insn","ret")]), Int(1,1), +(Indirect(Var("R30",Imm(64))),))]))])), Sub(Tid(1_797, "@_start"), + Attrs([Attr("address","0x640"), Attr("stub","()"), Attr("entry-point","()"), +Attr("c.proto","signed (*)(void)")]), "_start", + Args([Arg(Tid(1_854, "%0000073e"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("_start_result",Imm(32)), LOW(32,Var("R0",Imm(64))), Out())]), +Blks([Blk(Tid(505, "@_start"), Attrs([Attr("address","0x640")]), Phis([]), +Defs([Def(Tid(510, "%000001fe"), Attrs([Attr("address","0x644"), +Attr("insn","mov x29, #0x0")]), Var("R29",Imm(64)), Int(0,64)), +Def(Tid(515, "%00000203"), Attrs([Attr("address","0x648"), +Attr("insn","mov x30, #0x0")]), Var("R30",Imm(64)), Int(0,64)), +Def(Tid(521, "%00000209"), Attrs([Attr("address","0x64C"), +Attr("insn","mov x5, x0")]), Var("R5",Imm(64)), Var("R0",Imm(64))), +Def(Tid(528, "%00000210"), Attrs([Attr("address","0x650"), +Attr("insn","ldr x1, [sp]")]), Var("R1",Imm(64)), +Load(Var("mem",Mem(64,8)),Var("R31",Imm(64)),LittleEndian(),64)), +Def(Tid(534, "%00000216"), Attrs([Attr("address","0x654"), +Attr("insn","add x2, sp, #0x8")]), Var("R2",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(8,64))), Def(Tid(540, "%0000021c"), + Attrs([Attr("address","0x658"), Attr("insn","mov x6, sp")]), + Var("R6",Imm(64)), Var("R31",Imm(64))), Def(Tid(545, "%00000221"), + Attrs([Attr("address","0x65C"), Attr("insn","adrp x0, #126976")]), + Var("R0",Imm(64)), Int(126976,64)), Def(Tid(552, "%00000228"), + Attrs([Attr("address","0x660"), Attr("insn","ldr x0, [x0, #0xff0]")]), + Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R0",Imm(64)),Int(4080,64)),LittleEndian(),64)), +Def(Tid(557, "%0000022d"), Attrs([Attr("address","0x664"), +Attr("insn","mov x3, #0x0")]), Var("R3",Imm(64)), Int(0,64)), +Def(Tid(562, "%00000232"), Attrs([Attr("address","0x668"), +Attr("insn","mov x4, #0x0")]), Var("R4",Imm(64)), Int(0,64)), +Def(Tid(567, "%00000237"), Attrs([Attr("address","0x66C"), +Attr("insn","bl #-0x7c")]), Var("R30",Imm(64)), Int(1648,64))]), +Jmps([Call(Tid(570, "%0000023a"), Attrs([Attr("address","0x66C"), +Attr("insn","bl #-0x7c")]), Int(1,1), +(Direct(Tid(1_794, "@__libc_start_main")),Direct(Tid(572, "%0000023c"))))])), +Blk(Tid(572, "%0000023c"), Attrs([Attr("address","0x670")]), Phis([]), +Defs([Def(Tid(575, "%0000023f"), Attrs([Attr("address","0x670"), +Attr("insn","bl #-0x40")]), Var("R30",Imm(64)), Int(1652,64))]), +Jmps([Call(Tid(578, "%00000242"), Attrs([Attr("address","0x670"), +Attr("insn","bl #-0x40")]), Int(1,1), +(Direct(Tid(1_800, "@abort")),Direct(Tid(1_837, "%0000072d"))))])), +Blk(Tid(1_837, "%0000072d"), Attrs([]), Phis([]), Defs([]), +Jmps([Call(Tid(1_838, "%0000072e"), Attrs([]), Int(1,1), +(Direct(Tid(1_801, "@call_weak_fn")),))]))])), Sub(Tid(1_800, "@abort"), + Attrs([Attr("address","0x630"), Attr("stub","()"), Attr("noreturn","()"), +Attr("c.proto","void (*)(void)")]), "abort", Args([]), +Blks([Blk(Tid(576, "@abort"), 
Attrs([Attr("address","0x630")]), Phis([]), +Defs([Def(Tid(1_443, "%000005a3"), Attrs([Attr("address","0x630"), +Attr("insn","adrp x16, #126976")]), Var("R16",Imm(64)), Int(126976,64)), +Def(Tid(1_450, "%000005aa"), Attrs([Attr("address","0x634"), +Attr("insn","ldr x17, [x16, #0xfc8]")]), Var("R17",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R16",Imm(64)),Int(4040,64)),LittleEndian(),64)), +Def(Tid(1_456, "%000005b0"), Attrs([Attr("address","0x638"), +Attr("insn","add x16, x16, #0xfc8")]), Var("R16",Imm(64)), +PLUS(Var("R16",Imm(64)),Int(4040,64)))]), Jmps([Call(Tid(1_461, "%000005b5"), + Attrs([Attr("address","0x63C"), Attr("insn","br x17")]), Int(1,1), +(Indirect(Var("R17",Imm(64))),))]))])), Sub(Tid(1_801, "@call_weak_fn"), + Attrs([Attr("address","0x674"), Attr("c.proto","signed (*)(void)")]), + "call_weak_fn", Args([Arg(Tid(1_855, "%0000073f"), + Attrs([Attr("c.data","Top:u32"), Attr("c.layout","[signed : 32]"), +Attr("c.type","signed")]), Var("call_weak_fn_result",Imm(32)), +LOW(32,Var("R0",Imm(64))), Out())]), Blks([Blk(Tid(580, "@call_weak_fn"), + Attrs([Attr("address","0x674")]), Phis([]), Defs([Def(Tid(583, "%00000247"), + Attrs([Attr("address","0x674"), Attr("insn","adrp x0, #126976")]), + Var("R0",Imm(64)), Int(126976,64)), Def(Tid(590, "%0000024e"), + Attrs([Attr("address","0x678"), Attr("insn","ldr x0, [x0, #0xfe8]")]), + Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R0",Imm(64)),Int(4072,64)),LittleEndian(),64))]), +Jmps([Goto(Tid(596, "%00000254"), Attrs([Attr("address","0x67C"), +Attr("insn","cbz x0, #0x8")]), EQ(Var("R0",Imm(64)),Int(0,64)), +Direct(Tid(594, "%00000252"))), Goto(Tid(1_839, "%0000072f"), Attrs([]), + Int(1,1), Direct(Tid(1_177, "%00000499")))])), Blk(Tid(594, "%00000252"), + Attrs([Attr("address","0x684")]), Phis([]), Defs([]), +Jmps([Call(Tid(602, "%0000025a"), Attrs([Attr("address","0x684"), +Attr("insn","ret")]), Int(1,1), (Indirect(Var("R30",Imm(64))),))])), +Blk(Tid(1_177, "%00000499"), Attrs([Attr("address","0x680")]), Phis([]), +Defs([]), Jmps([Goto(Tid(1_180, "%0000049c"), Attrs([Attr("address","0x680"), +Attr("insn","b #-0x60")]), Int(1,1), +Direct(Tid(1_178, "@__gmon_start__")))])), Blk(Tid(1_178, "@__gmon_start__"), + Attrs([Attr("address","0x620")]), Phis([]), +Defs([Def(Tid(1_421, "%0000058d"), Attrs([Attr("address","0x620"), +Attr("insn","adrp x16, #126976")]), Var("R16",Imm(64)), Int(126976,64)), +Def(Tid(1_428, "%00000594"), Attrs([Attr("address","0x624"), +Attr("insn","ldr x17, [x16, #0xfc0]")]), Var("R17",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R16",Imm(64)),Int(4032,64)),LittleEndian(),64)), +Def(Tid(1_434, "%0000059a"), Attrs([Attr("address","0x628"), +Attr("insn","add x16, x16, #0xfc0")]), Var("R16",Imm(64)), +PLUS(Var("R16",Imm(64)),Int(4032,64)))]), Jmps([Call(Tid(1_439, "%0000059f"), + Attrs([Attr("address","0x62C"), Attr("insn","br x17")]), Int(1,1), +(Indirect(Var("R17",Imm(64))),))]))])), Sub(Tid(1_803, "@callee"), + Attrs([Attr("address","0x754"), Attr("c.proto","signed (*)(void)")]), + "callee", Args([Arg(Tid(1_856, "%00000740"), + Attrs([Attr("c.data","Top:u32"), Attr("c.layout","[signed : 32]"), +Attr("c.type","signed")]), Var("callee_result",Imm(32)), +LOW(32,Var("R0",Imm(64))), Out())]), Blks([Blk(Tid(827, "@callee"), + Attrs([Attr("address","0x754")]), Phis([]), Defs([Def(Tid(831, "%0000033f"), + Attrs([Attr("address","0x754"), Attr("insn","sub sp, sp, #0x20")]), + Var("R31",Imm(64)), PLUS(Var("R31",Imm(64)),Int(18446744073709551584,64))), +Def(Tid(839, "%00000347"), Attrs([Attr("address","0x758"), 
+Attr("insn","str x0, [sp, #0x8]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(8,64)),Var("R0",Imm(64)),LittleEndian(),64)), +Def(Tid(846, "%0000034e"), Attrs([Attr("address","0x75C"), +Attr("insn","ldr x0, [sp, #0x8]")]), Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(8,64)),LittleEndian(),64)), +Def(Tid(852, "%00000354"), Attrs([Attr("address","0x760"), +Attr("insn","add x0, x0, #0x10")]), Var("R0",Imm(64)), +PLUS(Var("R0",Imm(64)),Int(16,64))), Def(Tid(860, "%0000035c"), + Attrs([Attr("address","0x764"), Attr("insn","str x0, [sp, #0x18]")]), + Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(24,64)),Var("R0",Imm(64)),LittleEndian(),64)), +Def(Tid(867, "%00000363"), Attrs([Attr("address","0x768"), +Attr("insn","ldr x0, [sp, #0x18]")]), Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(24,64)),LittleEndian(),64)), +Def(Tid(873, "%00000369"), Attrs([Attr("address","0x76C"), +Attr("insn","add sp, sp, #0x20")]), Var("R31",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(32,64)))]), Jmps([Call(Tid(878, "%0000036e"), + Attrs([Attr("address","0x770"), Attr("insn","ret")]), Int(1,1), +(Indirect(Var("R30",Imm(64))),))]))])), +Sub(Tid(1_804, "@deregister_tm_clones"), Attrs([Attr("address","0x690"), +Attr("c.proto","signed (*)(void)")]), "deregister_tm_clones", + Args([Arg(Tid(1_857, "%00000741"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("deregister_tm_clones_result",Imm(32)), LOW(32,Var("R0",Imm(64))), +Out())]), Blks([Blk(Tid(608, "@deregister_tm_clones"), + Attrs([Attr("address","0x690")]), Phis([]), Defs([Def(Tid(611, "%00000263"), + Attrs([Attr("address","0x690"), Attr("insn","adrp x0, #131072")]), + Var("R0",Imm(64)), Int(131072,64)), Def(Tid(617, "%00000269"), + Attrs([Attr("address","0x694"), Attr("insn","add x0, x0, #0x10")]), + Var("R0",Imm(64)), PLUS(Var("R0",Imm(64)),Int(16,64))), +Def(Tid(622, "%0000026e"), Attrs([Attr("address","0x698"), +Attr("insn","adrp x1, #131072")]), Var("R1",Imm(64)), Int(131072,64)), +Def(Tid(628, "%00000274"), Attrs([Attr("address","0x69C"), +Attr("insn","add x1, x1, #0x10")]), Var("R1",Imm(64)), +PLUS(Var("R1",Imm(64)),Int(16,64))), Def(Tid(634, "%0000027a"), + Attrs([Attr("address","0x6A0"), Attr("insn","cmp x1, x0")]), + Var("#1",Imm(64)), NOT(Var("R0",Imm(64)))), Def(Tid(639, "%0000027f"), + Attrs([Attr("address","0x6A0"), Attr("insn","cmp x1, x0")]), + Var("#2",Imm(64)), PLUS(Var("R1",Imm(64)),NOT(Var("R0",Imm(64))))), +Def(Tid(645, "%00000285"), Attrs([Attr("address","0x6A0"), +Attr("insn","cmp x1, x0")]), Var("VF",Imm(1)), +NEQ(SIGNED(65,PLUS(Var("#2",Imm(64)),Int(1,64))),PLUS(PLUS(SIGNED(65,Var("R1",Imm(64))),SIGNED(65,Var("#1",Imm(64)))),Int(1,65)))), +Def(Tid(651, "%0000028b"), Attrs([Attr("address","0x6A0"), +Attr("insn","cmp x1, x0")]), Var("CF",Imm(1)), +NEQ(UNSIGNED(65,PLUS(Var("#2",Imm(64)),Int(1,64))),PLUS(PLUS(UNSIGNED(65,Var("R1",Imm(64))),UNSIGNED(65,Var("#1",Imm(64)))),Int(1,65)))), +Def(Tid(655, "%0000028f"), Attrs([Attr("address","0x6A0"), +Attr("insn","cmp x1, x0")]), Var("ZF",Imm(1)), +EQ(PLUS(Var("#2",Imm(64)),Int(1,64)),Int(0,64))), Def(Tid(659, "%00000293"), + Attrs([Attr("address","0x6A0"), Attr("insn","cmp x1, x0")]), + Var("NF",Imm(1)), Extract(63,63,PLUS(Var("#2",Imm(64)),Int(1,64))))]), +Jmps([Goto(Tid(665, "%00000299"), Attrs([Attr("address","0x6A4"), +Attr("insn","b.eq #0x18")]), EQ(Var("ZF",Imm(1)),Int(1,1)), +Direct(Tid(663, "%00000297"))), Goto(Tid(1_840, 
"%00000730"), Attrs([]), + Int(1,1), Direct(Tid(1_147, "%0000047b")))])), Blk(Tid(1_147, "%0000047b"), + Attrs([Attr("address","0x6A8")]), Phis([]), +Defs([Def(Tid(1_150, "%0000047e"), Attrs([Attr("address","0x6A8"), +Attr("insn","adrp x1, #126976")]), Var("R1",Imm(64)), Int(126976,64)), +Def(Tid(1_157, "%00000485"), Attrs([Attr("address","0x6AC"), +Attr("insn","ldr x1, [x1, #0xfd8]")]), Var("R1",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R1",Imm(64)),Int(4056,64)),LittleEndian(),64))]), +Jmps([Goto(Tid(1_162, "%0000048a"), Attrs([Attr("address","0x6B0"), +Attr("insn","cbz x1, #0xc")]), EQ(Var("R1",Imm(64)),Int(0,64)), +Direct(Tid(663, "%00000297"))), Goto(Tid(1_841, "%00000731"), Attrs([]), + Int(1,1), Direct(Tid(1_166, "%0000048e")))])), Blk(Tid(663, "%00000297"), + Attrs([Attr("address","0x6BC")]), Phis([]), Defs([]), +Jmps([Call(Tid(671, "%0000029f"), Attrs([Attr("address","0x6BC"), +Attr("insn","ret")]), Int(1,1), (Indirect(Var("R30",Imm(64))),))])), +Blk(Tid(1_166, "%0000048e"), Attrs([Attr("address","0x6B4")]), Phis([]), +Defs([Def(Tid(1_170, "%00000492"), Attrs([Attr("address","0x6B4"), +Attr("insn","mov x16, x1")]), Var("R16",Imm(64)), Var("R1",Imm(64)))]), +Jmps([Call(Tid(1_175, "%00000497"), Attrs([Attr("address","0x6B8"), +Attr("insn","br x16")]), Int(1,1), (Indirect(Var("R16",Imm(64))),))]))])), +Sub(Tid(1_807, "@frame_dummy"), Attrs([Attr("address","0x750"), +Attr("c.proto","signed (*)(void)")]), "frame_dummy", + Args([Arg(Tid(1_858, "%00000742"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("frame_dummy_result",Imm(32)), LOW(32,Var("R0",Imm(64))), Out())]), +Blks([Blk(Tid(823, "@frame_dummy"), Attrs([Attr("address","0x750")]), + Phis([]), Defs([]), Jmps([Call(Tid(825, "%00000339"), + Attrs([Attr("address","0x750"), Attr("insn","b #-0x90")]), Int(1,1), +(Direct(Tid(1_810, "@register_tm_clones")),))]))])), Sub(Tid(1_808, "@main"), + Attrs([Attr("address","0x774"), +Attr("c.proto","signed (*)(signed argc, const char** argv)")]), "main", + Args([Arg(Tid(1_859, "%00000743"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("main_argc",Imm(32)), LOW(32,Var("R0",Imm(64))), In()), +Arg(Tid(1_860, "%00000744"), Attrs([Attr("c.data","Top:u8 ptr ptr"), +Attr("c.layout","**[char : 8]"), Attr("c.type"," const char**")]), + Var("main_argv",Imm(64)), Var("R1",Imm(64)), Both()), +Arg(Tid(1_861, "%00000745"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("main_result",Imm(32)), LOW(32,Var("R0",Imm(64))), Out())]), +Blks([Blk(Tid(880, "@main"), Attrs([Attr("address","0x774")]), Phis([]), +Defs([Def(Tid(884, "%00000374"), Attrs([Attr("address","0x774"), +Attr("insn","stp x29, x30, [sp, #-0x30]!")]), Var("#4",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(18446744073709551568,64))), +Def(Tid(890, "%0000037a"), Attrs([Attr("address","0x774"), +Attr("insn","stp x29, x30, [sp, #-0x30]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("#4",Imm(64)),Var("R29",Imm(64)),LittleEndian(),64)), +Def(Tid(896, "%00000380"), Attrs([Attr("address","0x774"), +Attr("insn","stp x29, x30, [sp, #-0x30]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("#4",Imm(64)),Int(8,64)),Var("R30",Imm(64)),LittleEndian(),64)), +Def(Tid(900, "%00000384"), Attrs([Attr("address","0x774"), +Attr("insn","stp x29, x30, [sp, #-0x30]!")]), Var("R31",Imm(64)), +Var("#4",Imm(64))), Def(Tid(906, "%0000038a"), + Attrs([Attr("address","0x778"), 
Attr("insn","mov x29, sp")]), + Var("R29",Imm(64)), Var("R31",Imm(64))), Def(Tid(911, "%0000038f"), + Attrs([Attr("address","0x77C"), Attr("insn","mov x0, #0x14")]), + Var("R0",Imm(64)), Int(20,64)), Def(Tid(916, "%00000394"), + Attrs([Attr("address","0x780"), Attr("insn","bl #-0x170")]), + Var("R30",Imm(64)), Int(1924,64))]), Jmps([Call(Tid(919, "%00000397"), + Attrs([Attr("address","0x780"), Attr("insn","bl #-0x170")]), Int(1,1), +(Direct(Tid(1_809, "@malloc")),Direct(Tid(921, "%00000399"))))])), +Blk(Tid(921, "%00000399"), Attrs([Attr("address","0x784")]), Phis([]), +Defs([Def(Tid(927, "%0000039f"), Attrs([Attr("address","0x784"), +Attr("insn","str x0, [sp, #0x18]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(24,64)),Var("R0",Imm(64)),LittleEndian(),64)), +Def(Tid(934, "%000003a6"), Attrs([Attr("address","0x788"), +Attr("insn","ldr x0, [sp, #0x18]")]), Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(24,64)),LittleEndian(),64)), +Def(Tid(939, "%000003ab"), Attrs([Attr("address","0x78C"), +Attr("insn","mov w1, #0xc")]), Var("R1",Imm(64)), Int(12,64)), +Def(Tid(947, "%000003b3"), Attrs([Attr("address","0x790"), +Attr("insn","str w1, [x0]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("R0",Imm(64)),Extract(31,0,Var("R1",Imm(64))),LittleEndian(),32)), +Def(Tid(954, "%000003ba"), Attrs([Attr("address","0x794"), +Attr("insn","ldr x0, [sp, #0x18]")]), Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(24,64)),LittleEndian(),64)), +Def(Tid(960, "%000003c0"), Attrs([Attr("address","0x798"), +Attr("insn","add x0, x0, #0x10")]), Var("R0",Imm(64)), +PLUS(Var("R0",Imm(64)),Int(16,64))), Def(Tid(968, "%000003c8"), + Attrs([Attr("address","0x79C"), Attr("insn","str x0, [sp, #0x20]")]), + Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(32,64)),Var("R0",Imm(64)),LittleEndian(),64)), +Def(Tid(975, "%000003cf"), Attrs([Attr("address","0x7A0"), +Attr("insn","ldr x0, [sp, #0x20]")]), Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(32,64)),LittleEndian(),64)), +Def(Tid(980, "%000003d4"), Attrs([Attr("address","0x7A4"), +Attr("insn","mov w1, #0xd")]), Var("R1",Imm(64)), Int(13,64)), +Def(Tid(988, "%000003dc"), Attrs([Attr("address","0x7A8"), +Attr("insn","str w1, [x0]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("R0",Imm(64)),Extract(31,0,Var("R1",Imm(64))),LittleEndian(),32)), +Def(Tid(995, "%000003e3"), Attrs([Attr("address","0x7AC"), +Attr("insn","ldr x0, [sp, #0x20]")]), Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(32,64)),LittleEndian(),64)), +Def(Tid(1_000, "%000003e8"), Attrs([Attr("address","0x7B0"), +Attr("insn","bl #-0x5c")]), Var("R30",Imm(64)), Int(1972,64))]), +Jmps([Call(Tid(1_002, "%000003ea"), Attrs([Attr("address","0x7B0"), +Attr("insn","bl #-0x5c")]), Int(1,1), +(Direct(Tid(1_803, "@callee")),Direct(Tid(1_004, "%000003ec"))))])), +Blk(Tid(1_004, "%000003ec"), Attrs([Attr("address","0x7B4")]), Phis([]), +Defs([Def(Tid(1_010, "%000003f2"), Attrs([Attr("address","0x7B4"), +Attr("insn","str x0, [sp, #0x28]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(40,64)),Var("R0",Imm(64)),LittleEndian(),64)), +Def(Tid(1_017, "%000003f9"), Attrs([Attr("address","0x7B8"), +Attr("insn","ldr x0, [sp, #0x28]")]), Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(40,64)),LittleEndian(),64)), +Def(Tid(1_022, "%000003fe"), Attrs([Attr("address","0x7BC"), 
+Attr("insn","mov w1, #0xe")]), Var("R1",Imm(64)), Int(14,64)), +Def(Tid(1_030, "%00000406"), Attrs([Attr("address","0x7C0"), +Attr("insn","str w1, [x0]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("R0",Imm(64)),Extract(31,0,Var("R1",Imm(64))),LittleEndian(),32)), +Def(Tid(1_035, "%0000040b"), Attrs([Attr("address","0x7C4"), +Attr("insn","mov w0, #0x0")]), Var("R0",Imm(64)), Int(0,64)), +Def(Tid(1_042, "%00000412"), Attrs([Attr("address","0x7C8"), +Attr("insn","ldp x29, x30, [sp], #0x30")]), Var("R29",Imm(64)), +Load(Var("mem",Mem(64,8)),Var("R31",Imm(64)),LittleEndian(),64)), +Def(Tid(1_047, "%00000417"), Attrs([Attr("address","0x7C8"), +Attr("insn","ldp x29, x30, [sp], #0x30")]), Var("R30",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(8,64)),LittleEndian(),64)), +Def(Tid(1_051, "%0000041b"), Attrs([Attr("address","0x7C8"), +Attr("insn","ldp x29, x30, [sp], #0x30")]), Var("R31",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(48,64)))]), Jmps([Call(Tid(1_056, "%00000420"), + Attrs([Attr("address","0x7CC"), Attr("insn","ret")]), Int(1,1), +(Indirect(Var("R30",Imm(64))),))]))])), Sub(Tid(1_809, "@malloc"), + Attrs([Attr("address","0x610"), Attr("stub","()"), Attr("malloc","()"), +Attr("c.proto","void* (*)(unsigned long size)")]), "malloc", + Args([Arg(Tid(1_862, "%00000746"), Attrs([Attr("alloc-size","()"), +Attr("c.data","Top:u64"), Attr("c.layout","[unsigned long : 64]"), +Attr("c.type","unsigned long")]), Var("malloc_size",Imm(64)), +Var("R0",Imm(64)), In()), Arg(Tid(1_863, "%00000747"), + Attrs([Attr("warn-unused","()"), Attr("c.data","{} ptr"), +Attr("c.layout","*[ : 8]"), Attr("c.type","void*")]), + Var("malloc_result",Imm(64)), Var("R0",Imm(64)), Out())]), +Blks([Blk(Tid(917, "@malloc"), Attrs([Attr("address","0x610")]), Phis([]), +Defs([Def(Tid(1_399, "%00000577"), Attrs([Attr("address","0x610"), +Attr("insn","adrp x16, #126976")]), Var("R16",Imm(64)), Int(126976,64)), +Def(Tid(1_406, "%0000057e"), Attrs([Attr("address","0x614"), +Attr("insn","ldr x17, [x16, #0xfb8]")]), Var("R17",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R16",Imm(64)),Int(4024,64)),LittleEndian(),64)), +Def(Tid(1_412, "%00000584"), Attrs([Attr("address","0x618"), +Attr("insn","add x16, x16, #0xfb8")]), Var("R16",Imm(64)), +PLUS(Var("R16",Imm(64)),Int(4024,64)))]), Jmps([Call(Tid(1_417, "%00000589"), + Attrs([Attr("address","0x61C"), Attr("insn","br x17")]), Int(1,1), +(Indirect(Var("R17",Imm(64))),))]))])), +Sub(Tid(1_810, "@register_tm_clones"), Attrs([Attr("address","0x6C0"), +Attr("c.proto","signed (*)(void)")]), "register_tm_clones", + Args([Arg(Tid(1_864, "%00000748"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("register_tm_clones_result",Imm(32)), LOW(32,Var("R0",Imm(64))), +Out())]), Blks([Blk(Tid(673, "@register_tm_clones"), + Attrs([Attr("address","0x6C0")]), Phis([]), Defs([Def(Tid(676, "%000002a4"), + Attrs([Attr("address","0x6C0"), Attr("insn","adrp x0, #131072")]), + Var("R0",Imm(64)), Int(131072,64)), Def(Tid(682, "%000002aa"), + Attrs([Attr("address","0x6C4"), Attr("insn","add x0, x0, #0x10")]), + Var("R0",Imm(64)), PLUS(Var("R0",Imm(64)),Int(16,64))), +Def(Tid(687, "%000002af"), Attrs([Attr("address","0x6C8"), +Attr("insn","adrp x1, #131072")]), Var("R1",Imm(64)), Int(131072,64)), +Def(Tid(693, "%000002b5"), Attrs([Attr("address","0x6CC"), +Attr("insn","add x1, x1, #0x10")]), Var("R1",Imm(64)), +PLUS(Var("R1",Imm(64)),Int(16,64))), Def(Tid(700, "%000002bc"), + Attrs([Attr("address","0x6D0"), Attr("insn","sub x1, 
x1, x0")]), + Var("R1",Imm(64)), +PLUS(PLUS(Var("R1",Imm(64)),NOT(Var("R0",Imm(64)))),Int(1,64))), +Def(Tid(706, "%000002c2"), Attrs([Attr("address","0x6D4"), +Attr("insn","lsr x2, x1, #63")]), Var("R2",Imm(64)), +Concat(Int(0,63),Extract(63,63,Var("R1",Imm(64))))), +Def(Tid(713, "%000002c9"), Attrs([Attr("address","0x6D8"), +Attr("insn","add x1, x2, x1, asr #3")]), Var("R1",Imm(64)), +PLUS(Var("R2",Imm(64)),ARSHIFT(Var("R1",Imm(64)),Int(3,3)))), +Def(Tid(719, "%000002cf"), Attrs([Attr("address","0x6DC"), +Attr("insn","asr x1, x1, #1")]), Var("R1",Imm(64)), +SIGNED(64,Extract(63,1,Var("R1",Imm(64)))))]), +Jmps([Goto(Tid(725, "%000002d5"), Attrs([Attr("address","0x6E0"), +Attr("insn","cbz x1, #0x18")]), EQ(Var("R1",Imm(64)),Int(0,64)), +Direct(Tid(723, "%000002d3"))), Goto(Tid(1_842, "%00000732"), Attrs([]), + Int(1,1), Direct(Tid(1_117, "%0000045d")))])), Blk(Tid(1_117, "%0000045d"), + Attrs([Attr("address","0x6E4")]), Phis([]), +Defs([Def(Tid(1_120, "%00000460"), Attrs([Attr("address","0x6E4"), +Attr("insn","adrp x2, #126976")]), Var("R2",Imm(64)), Int(126976,64)), +Def(Tid(1_127, "%00000467"), Attrs([Attr("address","0x6E8"), +Attr("insn","ldr x2, [x2, #0xff8]")]), Var("R2",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R2",Imm(64)),Int(4088,64)),LittleEndian(),64))]), +Jmps([Goto(Tid(1_132, "%0000046c"), Attrs([Attr("address","0x6EC"), +Attr("insn","cbz x2, #0xc")]), EQ(Var("R2",Imm(64)),Int(0,64)), +Direct(Tid(723, "%000002d3"))), Goto(Tid(1_843, "%00000733"), Attrs([]), + Int(1,1), Direct(Tid(1_136, "%00000470")))])), Blk(Tid(723, "%000002d3"), + Attrs([Attr("address","0x6F8")]), Phis([]), Defs([]), +Jmps([Call(Tid(731, "%000002db"), Attrs([Attr("address","0x6F8"), +Attr("insn","ret")]), Int(1,1), (Indirect(Var("R30",Imm(64))),))])), +Blk(Tid(1_136, "%00000470"), Attrs([Attr("address","0x6F0")]), Phis([]), +Defs([Def(Tid(1_140, "%00000474"), Attrs([Attr("address","0x6F0"), +Attr("insn","mov x16, x2")]), Var("R16",Imm(64)), Var("R2",Imm(64)))]), +Jmps([Call(Tid(1_145, "%00000479"), Attrs([Attr("address","0x6F4"), +Attr("insn","br x16")]), Int(1,1), +(Indirect(Var("R16",Imm(64))),))]))]))]))) \ No newline at end of file diff --git a/examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.bir b/examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.bir new file mode 100755 index 000000000..4329d89ad --- /dev/null +++ b/examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.bir @@ -0,0 +1,279 @@ +00000715: program +000006fd: sub __cxa_finalize(__cxa_finalize_result) +00000716: __cxa_finalize_result :: out u32 = low:32[R0] + +00000459: +00000561: R16 := 0x1F000 +00000568: R17 := mem[R16 + 0xFB0, el]:u64 +0000056e: R16 := R16 + 0xFB0 +00000573: call R17 with noreturn + +000006fe: sub __do_global_dtors_aux(__do_global_dtors_aux_result) +00000717: __do_global_dtors_aux_result :: out u32 = low:32[R0] + +000002df: +000002e3: #3 := R31 - 0x20 +000002e9: mem := mem with [#3, el]:u64 <- R29 +000002ef: mem := mem with [#3 + 8, el]:u64 <- R30 +000002f3: R31 := #3 +000002f9: R29 := R31 +00000301: mem := mem with [R31 + 0x10, el]:u64 <- R19 +00000306: R19 := 0x20000 +0000030d: R0 := pad:64[mem[R19 + 0x10]] +00000314: when 31:0[R0] <> 0 goto %00000312 +000006ff: goto %00000422 + +00000422: +00000425: R0 := 0x1F000 +0000042c: R0 := mem[R0 + 0xFE0, el]:u64 +00000432: when R0 = 0 goto %00000430 +00000700: goto %00000449 + +00000449: +0000044c: R0 := 0x20000 +00000453: R0 := mem[R0 + 8, el]:u64 +00000458: R30 := 0x730 +0000045b: call @__cxa_finalize with return %00000430 + 
+00000430: +00000438: R30 := 0x734 +0000043a: call @deregister_tm_clones with return %0000043c + +0000043c: +0000043f: R0 := 1 +00000447: mem := mem with [R19 + 0x10] <- 7:0[R0] +00000701: goto %00000312 + +00000312: +0000031c: R19 := mem[R31 + 0x10, el]:u64 +00000323: R29 := mem[R31, el]:u64 +00000328: R30 := mem[R31 + 8, el]:u64 +0000032c: R31 := R31 + 0x20 +00000331: call R30 with noreturn + +00000702: sub __libc_start_main(__libc_start_main_main, __libc_start_main_arg2, __libc_start_main_arg3, __libc_start_main_auxv, __libc_start_main_result) +00000718: __libc_start_main_main :: in u64 = R0 +00000719: __libc_start_main_arg2 :: in u32 = low:32[R1] +0000071a: __libc_start_main_arg3 :: in out u64 = R2 +0000071b: __libc_start_main_auxv :: in out u64 = R3 +0000071c: __libc_start_main_result :: out u32 = low:32[R0] + +00000238: +0000054b: R16 := 0x1F000 +00000552: R17 := mem[R16 + 0xFA8, el]:u64 +00000558: R16 := R16 + 0xFA8 +0000055d: call R17 with noreturn + +00000703: sub _fini(_fini_result) +0000071d: _fini_result :: out u32 = low:32[R0] + +00000034: +0000003a: #0 := R31 - 0x10 +00000040: mem := mem with [#0, el]:u64 <- R29 +00000046: mem := mem with [#0 + 8, el]:u64 <- R30 +0000004a: R31 := #0 +00000050: R29 := R31 +00000057: R29 := mem[R31, el]:u64 +0000005c: R30 := mem[R31 + 8, el]:u64 +00000060: R31 := R31 + 0x10 +00000065: call R30 with noreturn + +00000704: sub _init(_init_result) +0000071e: _init_result :: out u32 = low:32[R0] + +0000063c: +00000642: #6 := R31 - 0x10 +00000648: mem := mem with [#6, el]:u64 <- R29 +0000064e: mem := mem with [#6 + 8, el]:u64 <- R30 +00000652: R31 := #6 +00000658: R29 := R31 +0000065d: R30 := 0x5C8 +0000065f: call @call_weak_fn with return %00000661 + +00000661: +00000666: R29 := mem[R31, el]:u64 +0000066b: R30 := mem[R31 + 8, el]:u64 +0000066f: R31 := R31 + 0x10 +00000674: call R30 with noreturn + +00000705: sub _start(_start_result) +0000071f: _start_result :: out u32 = low:32[R0] + +000001f9: +000001fe: R29 := 0 +00000203: R30 := 0 +00000209: R5 := R0 +00000210: R1 := mem[R31, el]:u64 +00000216: R2 := R31 + 8 +0000021c: R6 := R31 +00000221: R0 := 0x1F000 +00000228: R0 := mem[R0 + 0xFF0, el]:u64 +0000022d: R3 := 0 +00000232: R4 := 0 +00000237: R30 := 0x670 +0000023a: call @__libc_start_main with return %0000023c + +0000023c: +0000023f: R30 := 0x674 +00000242: call @abort with return %00000706 + +00000706: +00000707: call @call_weak_fn with noreturn + +00000708: sub abort() + + +00000240: +000005a3: R16 := 0x1F000 +000005aa: R17 := mem[R16 + 0xFC8, el]:u64 +000005b0: R16 := R16 + 0xFC8 +000005b5: call R17 with noreturn + +00000709: sub call_weak_fn(call_weak_fn_result) +00000720: call_weak_fn_result :: out u32 = low:32[R0] + +00000244: +00000247: R0 := 0x1F000 +0000024e: R0 := mem[R0 + 0xFE8, el]:u64 +00000254: when R0 = 0 goto %00000252 +0000070a: goto %00000499 + +00000252: +0000025a: call R30 with noreturn + +00000499: +0000049c: goto @__gmon_start__ + +0000049a: +0000058d: R16 := 0x1F000 +00000594: R17 := mem[R16 + 0xFC0, el]:u64 +0000059a: R16 := R16 + 0xFC0 +0000059f: call R17 with noreturn + +0000070b: sub callee(callee_result) +00000721: callee_result :: out u32 = low:32[R0] + +0000033b: +0000033f: R31 := R31 - 0x20 +00000347: mem := mem with [R31 + 8, el]:u64 <- R0 +0000034e: R0 := mem[R31 + 8, el]:u64 +00000354: R0 := R0 + 0x10 +0000035c: mem := mem with [R31 + 0x18, el]:u64 <- R0 +00000363: R0 := mem[R31 + 0x18, el]:u64 +00000369: R31 := R31 + 0x20 +0000036e: call R30 with noreturn + +0000070c: sub 
deregister_tm_clones(deregister_tm_clones_result) +00000722: deregister_tm_clones_result :: out u32 = low:32[R0] + +00000260: +00000263: R0 := 0x20000 +00000269: R0 := R0 + 0x10 +0000026e: R1 := 0x20000 +00000274: R1 := R1 + 0x10 +0000027a: #1 := ~R0 +0000027f: #2 := R1 + ~R0 +00000285: VF := extend:65[#2 + 1] <> extend:65[R1] + extend:65[#1] + 1 +0000028b: CF := pad:65[#2 + 1] <> pad:65[R1] + pad:65[#1] + 1 +0000028f: ZF := #2 + 1 = 0 +00000293: NF := 63:63[#2 + 1] +00000299: when ZF goto %00000297 +0000070d: goto %0000047b + +0000047b: +0000047e: R1 := 0x1F000 +00000485: R1 := mem[R1 + 0xFD8, el]:u64 +0000048a: when R1 = 0 goto %00000297 +0000070e: goto %0000048e + +00000297: +0000029f: call R30 with noreturn + +0000048e: +00000492: R16 := R1 +00000497: call R16 with noreturn + +0000070f: sub frame_dummy(frame_dummy_result) +00000723: frame_dummy_result :: out u32 = low:32[R0] + +00000337: +00000339: call @register_tm_clones with noreturn + +00000710: sub main(main_argc, main_argv, main_result) +00000724: main_argc :: in u32 = low:32[R0] +00000725: main_argv :: in out u64 = R1 +00000726: main_result :: out u32 = low:32[R0] + +00000370: +00000374: #4 := R31 - 0x30 +0000037a: mem := mem with [#4, el]:u64 <- R29 +00000380: mem := mem with [#4 + 8, el]:u64 <- R30 +00000384: R31 := #4 +0000038a: R29 := R31 +0000038f: R0 := 0x14 +00000394: R30 := 0x784 +00000397: call @malloc with return %00000399 + +00000399: +0000039f: mem := mem with [R31 + 0x18, el]:u64 <- R0 +000003a6: R0 := mem[R31 + 0x18, el]:u64 +000003ab: R1 := 0xC +000003b3: mem := mem with [R0, el]:u32 <- 31:0[R1] +000003ba: R0 := mem[R31 + 0x18, el]:u64 +000003c0: R0 := R0 + 0x10 +000003c8: mem := mem with [R31 + 0x20, el]:u64 <- R0 +000003cf: R0 := mem[R31 + 0x20, el]:u64 +000003d4: R1 := 0xD +000003dc: mem := mem with [R0, el]:u32 <- 31:0[R1] +000003e3: R0 := mem[R31 + 0x20, el]:u64 +000003e8: R30 := 0x7B4 +000003ea: call @callee with return %000003ec + +000003ec: +000003f2: mem := mem with [R31 + 0x28, el]:u64 <- R0 +000003f9: R0 := mem[R31 + 0x28, el]:u64 +000003fe: R1 := 0xE +00000406: mem := mem with [R0, el]:u32 <- 31:0[R1] +0000040b: R0 := 0 +00000412: R29 := mem[R31, el]:u64 +00000417: R30 := mem[R31 + 8, el]:u64 +0000041b: R31 := R31 + 0x30 +00000420: call R30 with noreturn + +00000711: sub malloc(malloc_size, malloc_result) +00000727: malloc_size :: in u64 = R0 +00000728: malloc_result :: out u64 = R0 + +00000395: +00000577: R16 := 0x1F000 +0000057e: R17 := mem[R16 + 0xFB8, el]:u64 +00000584: R16 := R16 + 0xFB8 +00000589: call R17 with noreturn + +00000712: sub register_tm_clones(register_tm_clones_result) +00000729: register_tm_clones_result :: out u32 = low:32[R0] + +000002a1: +000002a4: R0 := 0x20000 +000002aa: R0 := R0 + 0x10 +000002af: R1 := 0x20000 +000002b5: R1 := R1 + 0x10 +000002bc: R1 := R1 + ~R0 + 1 +000002c2: R2 := 0.63:63[R1] +000002c9: R1 := R2 + (R1 ~>> 3) +000002cf: R1 := extend:64[63:1[R1]] +000002d5: when R1 = 0 goto %000002d3 +00000713: goto %0000045d + +0000045d: +00000460: R2 := 0x1F000 +00000467: R2 := mem[R2 + 0xFF8, el]:u64 +0000046c: when R2 = 0 goto %000002d3 +00000714: goto %00000470 + +000002d3: +000002db: call R30 with noreturn + +00000470: +00000474: R16 := R2 +00000479: call R16 with noreturn diff --git a/examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.c b/examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.c new file mode 100644 index 000000000..1d02bae06 --- /dev/null +++ b/examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.c @@ -0,0 +1,14 
@@ +#include +int* callee(int* arg) { + int* ret = arg + sizeof(int); // this is wrong + return ret; +} + +int main() { + int *bar = malloc(5 * sizeof(int)); + *bar = 12; + int* foo = bar + sizeof(int); + *foo = 13; + int* bat = callee(foo); + *bat = 14; +} diff --git a/examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf b/examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf new file mode 100755 index 000000000..3fb1fea4c --- /dev/null +++ b/examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf @@ -0,0 +1,123 @@ + +Relocation section '.rela.dyn' at offset 0x480 contains 8 entries: + Offset Info Type Symbol's Value Symbol's Name + Addend +000000000001fd90 0000000000000403 R_AARCH64_RELATIVE 750 +000000000001fd98 0000000000000403 R_AARCH64_RELATIVE 700 +000000000001fff0 0000000000000403 R_AARCH64_RELATIVE 774 +0000000000020008 0000000000000403 R_AARCH64_RELATIVE 20008 +000000000001ffd8 0000000400000401 R_AARCH64_GLOB_DAT 0000000000000000 _ITM_deregisterTMCloneTable + 0 +000000000001ffe0 0000000500000401 R_AARCH64_GLOB_DAT 0000000000000000 __cxa_finalize@GLIBC_2.17 + 0 +000000000001ffe8 0000000700000401 R_AARCH64_GLOB_DAT 0000000000000000 __gmon_start__ + 0 +000000000001fff8 0000000900000401 R_AARCH64_GLOB_DAT 0000000000000000 _ITM_registerTMCloneTable + 0 + +Relocation section '.rela.plt' at offset 0x540 contains 5 entries: + Offset Info Type Symbol's Value Symbol's Name + Addend +000000000001ffa8 0000000300000402 R_AARCH64_JUMP_SLOT 0000000000000000 __libc_start_main@GLIBC_2.34 + 0 +000000000001ffb0 0000000500000402 R_AARCH64_JUMP_SLOT 0000000000000000 __cxa_finalize@GLIBC_2.17 + 0 +000000000001ffb8 0000000600000402 R_AARCH64_JUMP_SLOT 0000000000000000 malloc@GLIBC_2.17 + 0 +000000000001ffc0 0000000700000402 R_AARCH64_JUMP_SLOT 0000000000000000 __gmon_start__ + 0 +000000000001ffc8 0000000800000402 R_AARCH64_JUMP_SLOT 0000000000000000 abort@GLIBC_2.17 + 0 + +Symbol table '.dynsym' contains 10 entries: + Num: Value Size Type Bind Vis Ndx Name + 0: 0000000000000000 0 NOTYPE LOCAL DEFAULT UND + 1: 00000000000005b8 0 SECTION LOCAL DEFAULT 11 .init + 2: 0000000000020000 0 SECTION LOCAL DEFAULT 22 .data + 3: 0000000000000000 0 FUNC GLOBAL DEFAULT UND __libc_start_main@GLIBC_2.34 (2) + 4: 0000000000000000 0 NOTYPE WEAK DEFAULT UND _ITM_deregisterTMCloneTable + 5: 0000000000000000 0 FUNC WEAK DEFAULT UND __cxa_finalize@GLIBC_2.17 (3) + 6: 0000000000000000 0 FUNC GLOBAL DEFAULT UND malloc@GLIBC_2.17 (3) + 7: 0000000000000000 0 NOTYPE WEAK DEFAULT UND __gmon_start__ + 8: 0000000000000000 0 FUNC GLOBAL DEFAULT UND abort@GLIBC_2.17 (3) + 9: 0000000000000000 0 NOTYPE WEAK DEFAULT UND _ITM_registerTMCloneTable + +Symbol table '.symtab' contains 88 entries: + Num: Value Size Type Bind Vis Ndx Name + 0: 0000000000000000 0 NOTYPE LOCAL DEFAULT UND + 1: 0000000000000238 0 SECTION LOCAL DEFAULT 1 .interp + 2: 0000000000000254 0 SECTION LOCAL DEFAULT 2 .note.gnu.build-id + 3: 0000000000000278 0 SECTION LOCAL DEFAULT 3 .note.ABI-tag + 4: 0000000000000298 0 SECTION LOCAL DEFAULT 4 .gnu.hash + 5: 00000000000002b8 0 SECTION LOCAL DEFAULT 5 .dynsym + 6: 00000000000003a8 0 SECTION LOCAL DEFAULT 6 .dynstr + 7: 000000000000043c 0 SECTION LOCAL DEFAULT 7 .gnu.version + 8: 0000000000000450 0 SECTION LOCAL DEFAULT 8 .gnu.version_r + 9: 0000000000000480 0 SECTION LOCAL DEFAULT 9 .rela.dyn + 10: 0000000000000540 0 SECTION LOCAL DEFAULT 10 .rela.plt + 11: 00000000000005b8 0 SECTION LOCAL DEFAULT 11 .init + 12: 00000000000005d0 0 SECTION LOCAL DEFAULT 12 .plt + 13: 
0000000000000640 0 SECTION LOCAL DEFAULT 13 .text + 14: 00000000000007d0 0 SECTION LOCAL DEFAULT 14 .fini + 15: 00000000000007e4 0 SECTION LOCAL DEFAULT 15 .rodata + 16: 00000000000007e8 0 SECTION LOCAL DEFAULT 16 .eh_frame_hdr + 17: 0000000000000830 0 SECTION LOCAL DEFAULT 17 .eh_frame + 18: 000000000001fd90 0 SECTION LOCAL DEFAULT 18 .init_array + 19: 000000000001fd98 0 SECTION LOCAL DEFAULT 19 .fini_array + 20: 000000000001fda0 0 SECTION LOCAL DEFAULT 20 .dynamic + 21: 000000000001ff90 0 SECTION LOCAL DEFAULT 21 .got + 22: 0000000000020000 0 SECTION LOCAL DEFAULT 22 .data + 23: 0000000000020010 0 SECTION LOCAL DEFAULT 23 .bss + 24: 0000000000000000 0 SECTION LOCAL DEFAULT 24 .comment + 25: 0000000000000000 0 FILE LOCAL DEFAULT ABS Scrt1.o + 26: 0000000000000278 0 NOTYPE LOCAL DEFAULT 3 $d + 27: 0000000000000278 32 OBJECT LOCAL DEFAULT 3 __abi_tag + 28: 0000000000000640 0 NOTYPE LOCAL DEFAULT 13 $x + 29: 0000000000000844 0 NOTYPE LOCAL DEFAULT 17 $d + 30: 00000000000007e4 0 NOTYPE LOCAL DEFAULT 15 $d + 31: 0000000000000000 0 FILE LOCAL DEFAULT ABS crti.o + 32: 0000000000000674 0 NOTYPE LOCAL DEFAULT 13 $x + 33: 0000000000000674 20 FUNC LOCAL DEFAULT 13 call_weak_fn + 34: 00000000000005b8 0 NOTYPE LOCAL DEFAULT 11 $x + 35: 00000000000007d0 0 NOTYPE LOCAL DEFAULT 14 $x + 36: 0000000000000000 0 FILE LOCAL DEFAULT ABS crtn.o + 37: 00000000000005c8 0 NOTYPE LOCAL DEFAULT 11 $x + 38: 00000000000007dc 0 NOTYPE LOCAL DEFAULT 14 $x + 39: 0000000000000000 0 FILE LOCAL DEFAULT ABS crtstuff.c + 40: 0000000000000690 0 NOTYPE LOCAL DEFAULT 13 $x + 41: 0000000000000690 0 FUNC LOCAL DEFAULT 13 deregister_tm_clones + 42: 00000000000006c0 0 FUNC LOCAL DEFAULT 13 register_tm_clones + 43: 0000000000020008 0 NOTYPE LOCAL DEFAULT 22 $d + 44: 0000000000000700 0 FUNC LOCAL DEFAULT 13 __do_global_dtors_aux + 45: 0000000000020010 1 OBJECT LOCAL DEFAULT 23 completed.0 + 46: 000000000001fd98 0 NOTYPE LOCAL DEFAULT 19 $d + 47: 000000000001fd98 0 OBJECT LOCAL DEFAULT 19 __do_global_dtors_aux_fini_array_entry + 48: 0000000000000750 0 FUNC LOCAL DEFAULT 13 frame_dummy + 49: 000000000001fd90 0 NOTYPE LOCAL DEFAULT 18 $d + 50: 000000000001fd90 0 OBJECT LOCAL DEFAULT 18 __frame_dummy_init_array_entry + 51: 0000000000000858 0 NOTYPE LOCAL DEFAULT 17 $d + 52: 0000000000020010 0 NOTYPE LOCAL DEFAULT 23 $d + 53: 0000000000000000 0 FILE LOCAL DEFAULT ABS interproc_pointer_arithmetic.c + 54: 0000000000000754 0 NOTYPE LOCAL DEFAULT 13 $x + 55: 00000000000008b8 0 NOTYPE LOCAL DEFAULT 17 $d + 56: 0000000000000000 0 FILE LOCAL DEFAULT ABS crtstuff.c + 57: 00000000000008f0 0 NOTYPE LOCAL DEFAULT 17 $d + 58: 00000000000008f0 0 OBJECT LOCAL DEFAULT 17 __FRAME_END__ + 59: 0000000000000000 0 FILE LOCAL DEFAULT ABS + 60: 000000000001fda0 0 OBJECT LOCAL DEFAULT ABS _DYNAMIC + 61: 00000000000007e8 0 NOTYPE LOCAL DEFAULT 16 __GNU_EH_FRAME_HDR + 62: 000000000001ffd0 0 OBJECT LOCAL DEFAULT ABS _GLOBAL_OFFSET_TABLE_ + 63: 00000000000005d0 0 NOTYPE LOCAL DEFAULT 12 $x + 64: 0000000000000000 0 FUNC GLOBAL DEFAULT UND __libc_start_main@GLIBC_2.34 + 65: 0000000000000000 0 NOTYPE WEAK DEFAULT UND _ITM_deregisterTMCloneTable + 66: 0000000000020000 0 NOTYPE WEAK DEFAULT 22 data_start + 67: 0000000000020010 0 NOTYPE GLOBAL DEFAULT 23 __bss_start__ + 68: 0000000000000000 0 FUNC WEAK DEFAULT UND __cxa_finalize@GLIBC_2.17 + 69: 0000000000020018 0 NOTYPE GLOBAL DEFAULT 23 _bss_end__ + 70: 0000000000020010 0 NOTYPE GLOBAL DEFAULT 22 _edata + 71: 00000000000007d0 0 FUNC GLOBAL HIDDEN 14 _fini + 72: 0000000000020018 0 NOTYPE GLOBAL DEFAULT 23 __bss_end__ + 
73: 0000000000000000 0 FUNC GLOBAL DEFAULT UND malloc@GLIBC_2.17 + 74: 0000000000020000 0 NOTYPE GLOBAL DEFAULT 22 __data_start + 75: 0000000000000000 0 NOTYPE WEAK DEFAULT UND __gmon_start__ + 76: 0000000000020008 0 OBJECT GLOBAL HIDDEN 22 __dso_handle + 77: 0000000000000000 0 FUNC GLOBAL DEFAULT UND abort@GLIBC_2.17 + 78: 00000000000007e4 4 OBJECT GLOBAL DEFAULT 15 _IO_stdin_used + 79: 0000000000020018 0 NOTYPE GLOBAL DEFAULT 23 _end + 80: 0000000000000640 52 FUNC GLOBAL DEFAULT 13 _start + 81: 0000000000020018 0 NOTYPE GLOBAL DEFAULT 23 __end__ + 82: 0000000000020010 0 NOTYPE GLOBAL DEFAULT 23 __bss_start + 83: 0000000000000774 92 FUNC GLOBAL DEFAULT 13 main + 84: 0000000000000754 32 FUNC GLOBAL DEFAULT 13 callee + 85: 0000000000020010 0 OBJECT GLOBAL HIDDEN 22 __TMC_END__ + 86: 0000000000000000 0 NOTYPE WEAK DEFAULT UND _ITM_registerTMCloneTable + 87: 00000000000005b8 0 FUNC GLOBAL HIDDEN 11 _init diff --git a/examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.adt b/examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.adt new file mode 100644 index 000000000..b112faabc --- /dev/null +++ b/examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.adt @@ -0,0 +1,586 @@ +Project(Attrs([Attr("filename","unsafe_pointer_arithmetic.out"), +Attr("image-specification","(declare abi (name str))\n(declare arch (name str))\n(declare base-address (addr int))\n(declare bias (off int))\n(declare bits (size int))\n(declare code-region (addr int) (size int) (off int))\n(declare code-start (addr int))\n(declare entry-point (addr int))\n(declare external-reference (addr int) (name str))\n(declare format (name str))\n(declare is-executable (flag bool))\n(declare is-little-endian (flag bool))\n(declare llvm:base-address (addr int))\n(declare llvm:code-entry (name str) (off int) (size int))\n(declare llvm:coff-import-library (name str))\n(declare llvm:coff-virtual-section-header (name str) (addr int) (size int))\n(declare llvm:elf-program-header (name str) (off int) (size int))\n(declare llvm:elf-program-header-flags (name str) (ld bool) (r bool) \n (w bool) (x bool))\n(declare llvm:elf-virtual-program-header (name str) (addr int) (size int))\n(declare llvm:entry-point (addr int))\n(declare llvm:macho-symbol (name str) (value int))\n(declare llvm:name-reference (at int) (name str))\n(declare llvm:relocation (at int) (addr int))\n(declare llvm:section-entry (name str) (addr int) (size int) (off int))\n(declare llvm:section-flags (name str) (r bool) (w bool) (x bool))\n(declare llvm:segment-command (name str) (off int) (size int))\n(declare llvm:segment-command-flags (name str) (r bool) (w bool) (x bool))\n(declare llvm:symbol-entry (name str) (addr int) (size int) (off int)\n (value int))\n(declare llvm:virtual-segment-command (name str) (addr int) (size int))\n(declare mapped (addr int) (size int) (off int))\n(declare named-region (addr int) (size int) (name str))\n(declare named-symbol (addr int) (name str))\n(declare require (name str))\n(declare section (addr int) (size int))\n(declare segment (addr int) (size int) (r bool) (w bool) (x bool))\n(declare subarch (name str))\n(declare symbol-chunk (addr int) (size int) (root int))\n(declare symbol-value (addr int) (value int))\n(declare system (name str))\n(declare vendor (name str))\n\n(abi unknown)\n(arch aarch64)\n(base-address 0)\n(bias 0)\n(bits 64)\n(code-region 1976 20 1976)\n(code-region 1600 376 1600)\n(code-region 1488 112 1488)\n(code-region 1464 24 1464)\n(code-start 1652)\n(code-start 1600)\n(code-start 1876)\n(entry-point 
1600)\n(external-reference 131032 _ITM_deregisterTMCloneTable)\n(external-reference 131040 __cxa_finalize)\n(external-reference 131048 __gmon_start__)\n(external-reference 131064 _ITM_registerTMCloneTable)\n(external-reference 130984 __libc_start_main)\n(external-reference 130992 __cxa_finalize)\n(external-reference 131000 malloc)\n(external-reference 131008 __gmon_start__)\n(external-reference 131016 abort)\n(format elf)\n(is-executable true)\n(is-little-endian true)\n(llvm:base-address 0)\n(llvm:code-entry abort 0 0)\n(llvm:code-entry malloc 0 0)\n(llvm:code-entry __cxa_finalize 0 0)\n(llvm:code-entry __libc_start_main 0 0)\n(llvm:code-entry _init 1464 0)\n(llvm:code-entry main 1876 100)\n(llvm:code-entry _start 1600 52)\n(llvm:code-entry abort@GLIBC_2.17 0 0)\n(llvm:code-entry malloc@GLIBC_2.17 0 0)\n(llvm:code-entry _fini 1976 0)\n(llvm:code-entry __cxa_finalize@GLIBC_2.17 0 0)\n(llvm:code-entry __libc_start_main@GLIBC_2.34 0 0)\n(llvm:code-entry frame_dummy 1872 0)\n(llvm:code-entry __do_global_dtors_aux 1792 0)\n(llvm:code-entry register_tm_clones 1728 0)\n(llvm:code-entry deregister_tm_clones 1680 0)\n(llvm:code-entry call_weak_fn 1652 20)\n(llvm:code-entry .fini 1976 20)\n(llvm:code-entry .text 1600 376)\n(llvm:code-entry .plt 1488 112)\n(llvm:code-entry .init 1464 24)\n(llvm:elf-program-header 08 64912 624)\n(llvm:elf-program-header 07 0 0)\n(llvm:elf-program-header 06 2000 60)\n(llvm:elf-program-header 05 596 68)\n(llvm:elf-program-header 04 64928 496)\n(llvm:elf-program-header 03 64912 640)\n(llvm:elf-program-header 02 0 2236)\n(llvm:elf-program-header 01 568 27)\n(llvm:elf-program-header 00 64 504)\n(llvm:elf-program-header-flags 08 false true false false)\n(llvm:elf-program-header-flags 07 false true true false)\n(llvm:elf-program-header-flags 06 false true false false)\n(llvm:elf-program-header-flags 05 false true false false)\n(llvm:elf-program-header-flags 04 false true true false)\n(llvm:elf-program-header-flags 03 true true true false)\n(llvm:elf-program-header-flags 02 true true false true)\n(llvm:elf-program-header-flags 01 false true false false)\n(llvm:elf-program-header-flags 00 false true false false)\n(llvm:elf-virtual-program-header 08 130448 624)\n(llvm:elf-virtual-program-header 07 0 0)\n(llvm:elf-virtual-program-header 06 2000 60)\n(llvm:elf-virtual-program-header 05 596 68)\n(llvm:elf-virtual-program-header 04 130464 496)\n(llvm:elf-virtual-program-header 03 130448 648)\n(llvm:elf-virtual-program-header 02 0 2236)\n(llvm:elf-virtual-program-header 01 568 27)\n(llvm:elf-virtual-program-header 00 64 504)\n(llvm:entry-point 1600)\n(llvm:name-reference 131016 abort)\n(llvm:name-reference 131008 __gmon_start__)\n(llvm:name-reference 131000 malloc)\n(llvm:name-reference 130992 __cxa_finalize)\n(llvm:name-reference 130984 __libc_start_main)\n(llvm:name-reference 131064 _ITM_registerTMCloneTable)\n(llvm:name-reference 131048 __gmon_start__)\n(llvm:name-reference 131040 __cxa_finalize)\n(llvm:name-reference 131032 _ITM_deregisterTMCloneTable)\n(llvm:section-entry .shstrtab 0 250 68267)\n(llvm:section-entry .strtab 0 579 67688)\n(llvm:section-entry .symtab 0 2088 65600)\n(llvm:section-entry .comment 0 43 65552)\n(llvm:section-entry .bss 131088 8 65552)\n(llvm:section-entry .data 131072 16 65536)\n(llvm:section-entry .got 130960 112 65424)\n(llvm:section-entry .dynamic 130464 496 64928)\n(llvm:section-entry .fini_array 130456 8 64920)\n(llvm:section-entry .init_array 130448 8 64912)\n(llvm:section-entry .eh_frame 2064 172 2064)\n(llvm:section-entry .eh_frame_hdr 2000 60 
2000)\n(llvm:section-entry .rodata 1996 4 1996)\n(llvm:section-entry .fini 1976 20 1976)\n(llvm:section-entry .text 1600 376 1600)\n(llvm:section-entry .plt 1488 112 1488)\n(llvm:section-entry .init 1464 24 1464)\n(llvm:section-entry .rela.plt 1344 120 1344)\n(llvm:section-entry .rela.dyn 1152 192 1152)\n(llvm:section-entry .gnu.version_r 1104 48 1104)\n(llvm:section-entry .gnu.version 1084 20 1084)\n(llvm:section-entry .dynstr 936 148 936)\n(llvm:section-entry .dynsym 696 240 696)\n(llvm:section-entry .gnu.hash 664 28 664)\n(llvm:section-entry .note.ABI-tag 632 32 632)\n(llvm:section-entry .note.gnu.build-id 596 36 596)\n(llvm:section-entry .interp 568 27 568)\n(llvm:section-flags .shstrtab true false false)\n(llvm:section-flags .strtab true false false)\n(llvm:section-flags .symtab true false false)\n(llvm:section-flags .comment true false false)\n(llvm:section-flags .bss true true false)\n(llvm:section-flags .data true true false)\n(llvm:section-flags .got true true false)\n(llvm:section-flags .dynamic true true false)\n(llvm:section-flags .fini_array true true false)\n(llvm:section-flags .init_array true true false)\n(llvm:section-flags .eh_frame true false false)\n(llvm:section-flags .eh_frame_hdr true false false)\n(llvm:section-flags .rodata true false false)\n(llvm:section-flags .fini true false true)\n(llvm:section-flags .text true false true)\n(llvm:section-flags .plt true false true)\n(llvm:section-flags .init true false true)\n(llvm:section-flags .rela.plt true false false)\n(llvm:section-flags .rela.dyn true false false)\n(llvm:section-flags .gnu.version_r true false false)\n(llvm:section-flags .gnu.version true false false)\n(llvm:section-flags .dynstr true false false)\n(llvm:section-flags .dynsym true false false)\n(llvm:section-flags .gnu.hash true false false)\n(llvm:section-flags .note.ABI-tag true false false)\n(llvm:section-flags .note.gnu.build-id true false false)\n(llvm:section-flags .interp true false false)\n(llvm:symbol-entry abort 0 0 0 0)\n(llvm:symbol-entry malloc 0 0 0 0)\n(llvm:symbol-entry __cxa_finalize 0 0 0 0)\n(llvm:symbol-entry __libc_start_main 0 0 0 0)\n(llvm:symbol-entry _init 1464 0 1464 1464)\n(llvm:symbol-entry main 1876 100 1876 1876)\n(llvm:symbol-entry _start 1600 52 1600 1600)\n(llvm:symbol-entry abort@GLIBC_2.17 0 0 0 0)\n(llvm:symbol-entry malloc@GLIBC_2.17 0 0 0 0)\n(llvm:symbol-entry _fini 1976 0 1976 1976)\n(llvm:symbol-entry __cxa_finalize@GLIBC_2.17 0 0 0 0)\n(llvm:symbol-entry __libc_start_main@GLIBC_2.34 0 0 0 0)\n(llvm:symbol-entry frame_dummy 1872 0 1872 1872)\n(llvm:symbol-entry __do_global_dtors_aux 1792 0 1792 1792)\n(llvm:symbol-entry register_tm_clones 1728 0 1728 1728)\n(llvm:symbol-entry deregister_tm_clones 1680 0 1680 1680)\n(llvm:symbol-entry call_weak_fn 1652 20 1652 1652)\n(mapped 0 2236 0)\n(mapped 130448 640 64912)\n(named-region 0 2236 02)\n(named-region 130448 648 03)\n(named-region 568 27 .interp)\n(named-region 596 36 .note.gnu.build-id)\n(named-region 632 32 .note.ABI-tag)\n(named-region 664 28 .gnu.hash)\n(named-region 696 240 .dynsym)\n(named-region 936 148 .dynstr)\n(named-region 1084 20 .gnu.version)\n(named-region 1104 48 .gnu.version_r)\n(named-region 1152 192 .rela.dyn)\n(named-region 1344 120 .rela.plt)\n(named-region 1464 24 .init)\n(named-region 1488 112 .plt)\n(named-region 1600 376 .text)\n(named-region 1976 20 .fini)\n(named-region 1996 4 .rodata)\n(named-region 2000 60 .eh_frame_hdr)\n(named-region 2064 172 .eh_frame)\n(named-region 130448 8 .init_array)\n(named-region 130456 8 
.fini_array)\n(named-region 130464 496 .dynamic)\n(named-region 130960 112 .got)\n(named-region 131072 16 .data)\n(named-region 131088 8 .bss)\n(named-region 0 43 .comment)\n(named-region 0 2088 .symtab)\n(named-region 0 579 .strtab)\n(named-region 0 250 .shstrtab)\n(named-symbol 1652 call_weak_fn)\n(named-symbol 1680 deregister_tm_clones)\n(named-symbol 1728 register_tm_clones)\n(named-symbol 1792 __do_global_dtors_aux)\n(named-symbol 1872 frame_dummy)\n(named-symbol 0 __libc_start_main@GLIBC_2.34)\n(named-symbol 0 __cxa_finalize@GLIBC_2.17)\n(named-symbol 1976 _fini)\n(named-symbol 0 malloc@GLIBC_2.17)\n(named-symbol 0 abort@GLIBC_2.17)\n(named-symbol 1600 _start)\n(named-symbol 1876 main)\n(named-symbol 1464 _init)\n(named-symbol 0 __libc_start_main)\n(named-symbol 0 __cxa_finalize)\n(named-symbol 0 malloc)\n(named-symbol 0 abort)\n(require libc.so.6)\n(section 568 27)\n(section 596 36)\n(section 632 32)\n(section 664 28)\n(section 696 240)\n(section 936 148)\n(section 1084 20)\n(section 1104 48)\n(section 1152 192)\n(section 1344 120)\n(section 1464 24)\n(section 1488 112)\n(section 1600 376)\n(section 1976 20)\n(section 1996 4)\n(section 2000 60)\n(section 2064 172)\n(section 130448 8)\n(section 130456 8)\n(section 130464 496)\n(section 130960 112)\n(section 131072 16)\n(section 131088 8)\n(section 0 43)\n(section 0 2088)\n(section 0 579)\n(section 0 250)\n(segment 0 2236 true false true)\n(segment 130448 648 true true false)\n(subarch v8)\n(symbol-chunk 1652 20 1652)\n(symbol-chunk 1600 52 1600)\n(symbol-chunk 1876 100 1876)\n(symbol-value 1652 1652)\n(symbol-value 1680 1680)\n(symbol-value 1728 1728)\n(symbol-value 1792 1792)\n(symbol-value 1872 1872)\n(symbol-value 1976 1976)\n(symbol-value 1600 1600)\n(symbol-value 1876 1876)\n(symbol-value 1464 1464)\n(symbol-value 0 0)\n(system \"\")\n(vendor \"\")\n"), +Attr("abi-name","aarch64-linux-gnu-elf")]), +Sections([Section(".shstrtab", 0x0, "\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\xb7\x00\x01\x00\x00\x00\x40\x06\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\xa8\x0b\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x40\x00\x38\x00\x09\x00\x40\x00\x1c\x00\x1b\x00\x06\x00\x00\x00\x04\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\xf8\x01\x00\x00\x00\x00\x00\x00\xf8\x01\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xbc\x08\x00\x00\x00\x00\x00\x00\xbc\x08\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x06\x00\x00\x00\x90\xfd\x00\x00\x00\x00\x00\x00\x90\xfd"), +Section(".strtab", 0x0, 
"\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\xb7\x00\x01\x00\x00\x00\x40\x06\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\xa8\x0b\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x40\x00\x38\x00\x09\x00\x40\x00\x1c\x00\x1b\x00\x06\x00\x00\x00\x04\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\xf8\x01\x00\x00\x00\x00\x00\x00\xf8\x01\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xbc\x08\x00\x00\x00\x00\x00\x00\xbc\x08\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x06\x00\x00\x00\x90\xfd\x00\x00\x00\x00\x00\x00\x90\xfd\x01\x00\x00\x00\x00\x00\x90\xfd\x01\x00\x00\x00\x00\x00\x80\x02\x00\x00\x00\x00\x00\x00\x88\x02\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x06\x00\x00\x00\xa0\xfd\x00\x00\x00\x00\x00\x00\xa0\xfd\x01\x00\x00\x00\x00\x00\xa0\xfd\x01\x00\x00\x00\x00\x00\xf0\x01\x00\x00\x00\x00\x00\x00\xf0\x01\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x04\x00\x00\x00\x54\x02\x00\x00\x00\x00\x00\x00\x54\x02\x00\x00\x00\x00\x00\x00\x54\x02\x00\x00\x00\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x50\xe5\x74\x64\x04\x00\x00\x00\xd0\x07\x00\x00\x00\x00\x00\x00\xd0\x07\x00\x00\x00\x00\x00\x00\xd0\x07\x00\x00\x00\x00\x00\x00\x3c\x00\x00\x00\x00\x00\x00\x00\x3c\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x51\xe5\x74\x64\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x52\xe5\x74\x64\x04\x00\x00\x00\x90\xfd\x00\x00\x00\x00\x00\x00\x90\xfd\x01\x00\x00\x00\x00\x00\x90\xfd\x01\x00\x00\x00\x00\x00\x70\x02\x00\x00\x00\x00\x00\x00\x70\x02\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x2f\x6c\x69\x62\x2f\x6c\x64\x2d\x6c\x69\x6e"), +Section(".symtab", 0x0, 
"\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\xb7\x00\x01\x00\x00\x00\x40\x06\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\xa8\x0b\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x40\x00\x38\x00\x09\x00\x40\x00\x1c\x00\x1b\x00\x06\x00\x00\x00\x04\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\xf8\x01\x00\x00\x00\x00\x00\x00\xf8\x01\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xbc\x08\x00\x00\x00\x00\x00\x00\xbc\x08\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x06\x00\x00\x00\x90\xfd\x00\x00\x00\x00\x00\x00\x90\xfd\x01\x00\x00\x00\x00\x00\x90\xfd\x01\x00\x00\x00\x00\x00\x80\x02\x00\x00\x00\x00\x00\x00\x88\x02\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x06\x00\x00\x00\xa0\xfd\x00\x00\x00\x00\x00\x00\xa0\xfd\x01\x00\x00\x00\x00\x00\xa0\xfd\x01\x00\x00\x00\x00\x00\xf0\x01\x00\x00\x00\x00\x00\x00\xf0\x01\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x04\x00\x00\x00\x54\x02\x00\x00\x00\x00\x00\x00\x54\x02\x00\x00\x00\x00\x00\x00\x54\x02\x00\x00\x00\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x50\xe5\x74\x64\x04\x00\x00\x00\xd0\x07\x00\x00\x00\x00\x00\x00\xd0\x07\x00\x00\x00\x00\x00\x00\xd0\x07\x00\x00\x00\x00\x00\x00\x3c\x00\x00\x00\x00\x00\x00\x00\x3c\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x51\xe5\x74\x64\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x52\xe5\x74\x64\x04\x00\x00\x00\x90\xfd\x00\x00\x00\x00\x00\x00\x90\xfd\x01\x00\x00\x00\x00\x00\x90\xfd\x01\x00\x00\x00\x00\x00\x70\x02\x00\x00\x00\x00\x00\x00\x70\x02\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x2f\x6c\x69\x62\x2f\x6c\x64\x2d\x6c\x69\x6e\x75\x78\x2d\x61\x61\x72\x63\x68\x36\x34\x2e\x73\x6f\x2e\x31\x00\x00\x04\x00\x00\x00\x14\x00\x00\x00\x03\x00\x00\x00\x47\x4e\x55\x00\xe7\x83\xd0\x0b\x65\xff\x69\x99\x15\x9f\x13\x77\x3c\xa9\xc0\x82\xdf\x41\xac\xfa\x04\x00\x00\x00\x10\x00\x00\x00\x01\x00\x00\x00\x47\x4e\x55\x00\x00\x00\x00\x00\x03\x00\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x0b\x00\xb8\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x16\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x4f\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1a\x00\x00\x00\x22\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x6b\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
29\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7a\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x6d\x61\x6c\x6c\x6f\x63\x00\x5f\x5f\x6c\x69\x62\x63\x5f\x73\x74\x61\x72\x74\x5f\x6d\x61\x69\x6e\x00\x5f\x5f\x63\x78\x61\x5f\x66\x69\x6e\x61\x6c\x69\x7a\x65\x00\x61\x62\x6f\x72\x74\x00\x6c\x69\x62\x63\x2e\x73\x6f\x2e\x36\x00\x47\x4c\x49\x42\x43\x5f\x32\x2e\x31\x37\x00\x47\x4c\x49\x42\x43\x5f\x32\x2e\x33\x34\x00\x5f\x49\x54\x4d\x5f\x64\x65\x72\x65\x67\x69\x73\x74\x65\x72\x54\x4d\x43\x6c\x6f\x6e\x65\x54\x61\x62\x6c\x65\x00\x5f\x5f\x67\x6d\x6f\x6e\x5f\x73\x74\x61\x72\x74\x5f\x5f\x00\x5f\x49\x54\x4d\x5f\x72\x65\x67\x69\x73\x74\x65\x72\x54\x4d\x43\x6c\x6f\x6e\x65\x54\x61\x62\x6c\x65\x00\x00\x00\x00\x00\x00\x00\x02\x00\x01\x00\x03\x00\x03\x00\x01\x00\x03\x00\x01\x00\x01\x00\x02\x00\x2f\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x97\x91\x96\x06\x00\x00\x03\x00\x39\x00\x00\x00\x10\x00\x00\x00\xb4\x91\x96\x06\x00\x00\x02\x00\x44\x00\x00\x00\x00\x00\x00\x00\x90\xfd\x01\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x50\x07\x00\x00\x00\x00\x00\x00\x98\xfd\x01\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\x00\x00\x00\xf0\xff\x01\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x54\x07\x00\x00\x00\x00\x00\x00\x08\x00\x02\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x08\x00\x02\x00\x00\x00\x00\x00\xd8\xff\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe0\xff\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe8\xff\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf8\xff\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa8\xff\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb0\xff\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb8\xff\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc0\xff\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc8\xff\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f\x20\x03\xd5\xfd\x7b\xbf\xa9\xfd\x03\x00\x91\x2c\x00\x00\x94\xfd\x7b\xc1\xa8\xc0\x03\x5f\xd6\xf0\x7b\xbf\xa9\xf0\x00\x00\xf0\x11\xd2\x47\xf9\x10\x82\x3e\x91\x20\x02\x1f\xd6\x1f\x20\x03\xd5\x1f\x20\x03\xd5\x1f\x20\x03\xd5\xf0\x00\x00\xf0\x11\xd6\x47\xf9\x10\xa2\x3e\x91\x20\x02\x1f\xd6\xf0\x00\x00\xf0\x11\xda\x47\xf9\x10\xc2\x3e\x91\x20\x02\x1f\xd6\xf0\x00\x00\xf0\x11\xde\x47\xf9\x10\xe2\x3e\x91\x20\x02\x1f\xd6\xf0\x00\x00\xf0\x11\xe2\x47\xf9\x10\x02\x3f\x91\x20\x02\x1f\xd6\xf0\x00\x00\xf0\x11\xe6\x47\xf9\x10\x22\x3f\x91\x20\x02\x1f\xd6\x1f\x20\x03\xd5\x1d\x00\x80\xd2\x1e\x00\x80\xd2\xe5\x03\x00\xaa\xe1\x03\x40\xf9\xe2\x23\x00\x91\xe6\x03\x00\x91\xe0\x00\x00\xf0\x00\xf8\x47\xf9\x03\x00\x80\xd2\x04\x00\x80\xd2\xe1\xff\xff\x97\xf0\xff\xff\x97\xe0\x00\x00\xf0\x00\xf4\x47\xf9\x40\x00\x00\xb4\xe8\xff\xff\x17\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\x1f\x20\x03\xd5\x00\x01\x00\x90\x00\x40\x00\x91\x01\x01\x00\x90\x21\x40\x00\x91\x3f\x00\x00\xeb\xc0\x00\x00\x54\xe1\x00\x00\xf0\x21\xec\x47\xf9\x61\x00\x00\xb4\xf0\x03\x01\xaa\x00\x02\x1f\xd6\xc0\x03\x5f\xd6\x00\x01\x00\x90\x00\x40\x00\x91\x01\x01\x00\x90\x21\x40\x00\x91\x21\x00\x00\xcb\x22\xfc\x7f\xd3\x41\x0c\x81\x8b\x21\xfc\x41\x93\xc1\x00\x00\xb4\xe2\x00\x00\xf0\x42\xfc\x47\xf9\x62\x00\x00\xb4\xf0\
x03\x02\xaa\x00\x02\x1f\xd6\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\xfd\x7b\xbe\xa9\xfd\x03\x00\x91\xf3\x0b\x00\xf9\x13\x01\x00\x90\x60\x42\x40\x39\x40\x01\x00\x35\xe0\x00\x00\xf0\x00\xf0\x47\xf9\x80\x00\x00\xb4\x00\x01\x00\x90\x00\x04\x40\xf9\xb5\xff\xff\x97\xd8\xff\xff\x97\x20\x00\x80\x52\x60\x42\x00\x39\xf3\x0b\x40\xf9\xfd\x7b\xc2\xa8\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\x1f\x20\x03\xd5\xdc\xff\xff\x17\xfd\x7b\xbc\xa9\xfd\x03\x00\x91\x80\x02\x80\xd2\xac\xff\xff\x97\xe0\x0f\x00\xf9\xe0\x0f\x40\xf9\x81\x01\x80\x52\x01\x00\x00\xb9\xe0\x0f\x40\xf9\xe0\x13\x00\xf9\xe0\x13\x40\xf9\x00\x04\x00\x91\xe0\x17\x00\xf9\x00\x01\x80\xd2\xa1\xff\xff\x97\xe0\x1b\x00\xf9\xe0\x1b\x40\xf9\xe1\x17\x40\xf9\x01\x00\x00\xf9\xe0\x1b\x40\xf9\x00\x00\x40\xf9\xe0\x1f\x00\xf9\x00\x00\x80\x52\xfd\x7b\xc4\xa8\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\xfd\x7b\xbf\xa9\xfd\x03\x00\x91\xfd\x7b\xc1\xa8\xc0\x03\x5f\xd6\x01\x00\x02\x00\x01\x1b\x03\x3b\x3c\x00\x00\x00\x06\x00\x00\x00\x70\xfe\xff\xff\x54\x00\x00\x00\xc0\xfe\xff\xff\x68\x00\x00\x00\xf0\xfe\xff\xff\x7c\x00\x00\x00\x30\xff\xff\xff\x90\x00\x00\x00\x80\xff\xff\xff\xb4\x00\x00\x00\x84\xff\xff\xff\xc8\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x01\x7a\x52\x00\x04\x78\x1e\x01\x1b\x0c\x1f\x00\x10\x00\x00\x00"), +Section(".comment", 0x0, "\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\xb7\x00\x01\x00\x00\x00\x40\x06\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\xa8\x0b\x01"), +Section(".interp", 0x238, "\x2f\x6c\x69\x62\x2f\x6c\x64\x2d\x6c\x69\x6e\x75\x78\x2d\x61\x61\x72\x63\x68\x36\x34\x2e\x73\x6f\x2e\x31\x00"), +Section(".note.gnu.build-id", 0x254, "\x04\x00\x00\x00\x14\x00\x00\x00\x03\x00\x00\x00\x47\x4e\x55\x00\xe7\x83\xd0\x0b\x65\xff\x69\x99\x15\x9f\x13\x77\x3c\xa9\xc0\x82\xdf\x41\xac\xfa"), +Section(".note.ABI-tag", 0x278, "\x04\x00\x00\x00\x10\x00\x00\x00\x01\x00\x00\x00\x47\x4e\x55\x00\x00\x00\x00\x00\x03\x00\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00"), +Section(".gnu.hash", 0x298, "\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), +Section(".dynsym", 0x2B8, "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x0b\x00\xb8\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x16\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x4f\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1a\x00\x00\x00\x22\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x6b\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x29\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7a\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), +Section(".dynstr", 0x3A8, 
"\x00\x6d\x61\x6c\x6c\x6f\x63\x00\x5f\x5f\x6c\x69\x62\x63\x5f\x73\x74\x61\x72\x74\x5f\x6d\x61\x69\x6e\x00\x5f\x5f\x63\x78\x61\x5f\x66\x69\x6e\x61\x6c\x69\x7a\x65\x00\x61\x62\x6f\x72\x74\x00\x6c\x69\x62\x63\x2e\x73\x6f\x2e\x36\x00\x47\x4c\x49\x42\x43\x5f\x32\x2e\x31\x37\x00\x47\x4c\x49\x42\x43\x5f\x32\x2e\x33\x34\x00\x5f\x49\x54\x4d\x5f\x64\x65\x72\x65\x67\x69\x73\x74\x65\x72\x54\x4d\x43\x6c\x6f\x6e\x65\x54\x61\x62\x6c\x65\x00\x5f\x5f\x67\x6d\x6f\x6e\x5f\x73\x74\x61\x72\x74\x5f\x5f\x00\x5f\x49\x54\x4d\x5f\x72\x65\x67\x69\x73\x74\x65\x72\x54\x4d\x43\x6c\x6f\x6e\x65\x54\x61\x62\x6c\x65\x00"), +Section(".gnu.version", 0x43C, "\x00\x00\x00\x00\x00\x00\x02\x00\x01\x00\x03\x00\x03\x00\x01\x00\x03\x00\x01\x00"), +Section(".gnu.version_r", 0x450, "\x01\x00\x02\x00\x2f\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x97\x91\x96\x06\x00\x00\x03\x00\x39\x00\x00\x00\x10\x00\x00\x00\xb4\x91\x96\x06\x00\x00\x02\x00\x44\x00\x00\x00\x00\x00\x00\x00"), +Section(".rela.dyn", 0x480, "\x90\xfd\x01\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x50\x07\x00\x00\x00\x00\x00\x00\x98\xfd\x01\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\x00\x00\x00\xf0\xff\x01\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x54\x07\x00\x00\x00\x00\x00\x00\x08\x00\x02\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x08\x00\x02\x00\x00\x00\x00\x00\xd8\xff\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe0\xff\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe8\xff\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf8\xff\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), +Section(".rela.plt", 0x540, "\xa8\xff\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb0\xff\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xb8\xff\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc0\xff\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc8\xff\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), +Section(".init", 0x5B8, "\x1f\x20\x03\xd5\xfd\x7b\xbf\xa9\xfd\x03\x00\x91\x2c\x00\x00\x94\xfd\x7b\xc1\xa8\xc0\x03\x5f\xd6"), +Section(".plt", 0x5D0, "\xf0\x7b\xbf\xa9\xf0\x00\x00\xf0\x11\xd2\x47\xf9\x10\x82\x3e\x91\x20\x02\x1f\xd6\x1f\x20\x03\xd5\x1f\x20\x03\xd5\x1f\x20\x03\xd5\xf0\x00\x00\xf0\x11\xd6\x47\xf9\x10\xa2\x3e\x91\x20\x02\x1f\xd6\xf0\x00\x00\xf0\x11\xda\x47\xf9\x10\xc2\x3e\x91\x20\x02\x1f\xd6\xf0\x00\x00\xf0\x11\xde\x47\xf9\x10\xe2\x3e\x91\x20\x02\x1f\xd6\xf0\x00\x00\xf0\x11\xe2\x47\xf9\x10\x02\x3f\x91\x20\x02\x1f\xd6\xf0\x00\x00\xf0\x11\xe6\x47\xf9\x10\x22\x3f\x91\x20\x02\x1f\xd6"), +Section(".fini", 0x7B8, "\x1f\x20\x03\xd5\xfd\x7b\xbf\xa9\xfd\x03\x00\x91\xfd\x7b\xc1\xa8\xc0\x03\x5f\xd6"), +Section(".rodata", 0x7CC, "\x01\x00\x02\x00"), +Section(".eh_frame_hdr", 0x7D0, "\x01\x1b\x03\x3b\x3c\x00\x00\x00\x06\x00\x00\x00\x70\xfe\xff\xff\x54\x00\x00\x00\xc0\xfe\xff\xff\x68\x00\x00\x00\xf0\xfe\xff\xff\x7c\x00\x00\x00\x30\xff\xff\xff\x90\x00\x00\x00\x80\xff\xff\xff\xb4\x00\x00\x00\x84\xff\xff\xff\xc8\x00\x00\x00"), +Section(".eh_frame", 0x810, 
"\x10\x00\x00\x00\x00\x00\x00\x00\x01\x7a\x52\x00\x04\x78\x1e\x01\x1b\x0c\x1f\x00\x10\x00\x00\x00\x18\x00\x00\x00\x14\xfe\xff\xff\x34\x00\x00\x00\x00\x41\x07\x1e\x10\x00\x00\x00\x2c\x00\x00\x00\x50\xfe\xff\xff\x30\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x40\x00\x00\x00\x6c\xfe\xff\xff\x3c\x00\x00\x00\x00\x00\x00\x00\x20\x00\x00\x00\x54\x00\x00\x00\x98\xfe\xff\xff\x48\x00\x00\x00\x00\x41\x0e\x20\x9d\x04\x9e\x03\x42\x93\x02\x4e\xde\xdd\xd3\x0e\x00\x00\x00\x00\x10\x00\x00\x00\x78\x00\x00\x00\xc4\xfe\xff\xff\x04\x00\x00\x00\x00\x00\x00\x00\x1c\x00\x00\x00\x8c\x00\x00\x00\xb4\xfe\xff\xff\x64\x00\x00\x00\x00\x41\x0e\x40\x9d\x08\x9e\x07\x57\xde\xdd\x0e\x00\x00\x00\x00\x00\x00\x00\x00"), +Section(".fini_array", 0x1FD98, "\x00\x07\x00\x00\x00\x00\x00\x00"), +Section(".dynamic", 0x1FDA0, "\x01\x00\x00\x00\x00\x00\x00\x00\x2f\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00\xb8\x05\x00\x00\x00\x00\x00\x00\x0d\x00\x00\x00\x00\x00\x00\x00\xb8\x07\x00\x00\x00\x00\x00\x00\x19\x00\x00\x00\x00\x00\x00\x00\x90\xfd\x01\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\x98\xfd\x01\x00\x00\x00\x00\x00\x1c\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\xf5\xfe\xff\x6f\x00\x00\x00\x00\x98\x02\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\xa8\x03\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\xb8\x02\x00\x00\x00\x00\x00\x00\x0a\x00\x00\x00\x00\x00\x00\x00\x94\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x00\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x15\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x90\xff\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x78\x00\x00\x00\x00\x00\x00\x00\x14\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x17\x00\x00\x00\x00\x00\x00\x00\x40\x05\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x80\x04\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\xc0\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\xfb\xff\xff\x6f\x00\x00\x00\x00\x01\x00\x00\x08\x00\x00\x00\x00\xfe\xff\xff\x6f\x00\x00\x00\x00\x50\x04\x00\x00\x00\x00\x00\x00\xff\xff\xff\x6f\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xf0\xff\xff\x6f\x00\x00\x00\x00\x3c\x04\x00\x00\x00\x00\x00\x00\xf9\xff\xff\x6f\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), +Section(".got", 0x1FF90, "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd0\x05\x00\x00\x00\x00\x00\x00\xd0\x05\x00\x00\x00\x00\x00\x00\xd0\x05\x00\x00\x00\x00\x00\x00\xd0\x05\x00\x00\x00\x00\x00\x00\xd0\x05\x00\x00\x00\x00\x00\x00\xa0\xfd\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x54\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), +Section(".data", 0x20000, "\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x02\x00\x00\x00\x00\x00"), +Section(".init_array", 0x1FD90, "\x50\x07\x00\x00\x00\x00\x00\x00"), +Section(".text", 0x640, 
"\x1f\x20\x03\xd5\x1d\x00\x80\xd2\x1e\x00\x80\xd2\xe5\x03\x00\xaa\xe1\x03\x40\xf9\xe2\x23\x00\x91\xe6\x03\x00\x91\xe0\x00\x00\xf0\x00\xf8\x47\xf9\x03\x00\x80\xd2\x04\x00\x80\xd2\xe1\xff\xff\x97\xf0\xff\xff\x97\xe0\x00\x00\xf0\x00\xf4\x47\xf9\x40\x00\x00\xb4\xe8\xff\xff\x17\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\x1f\x20\x03\xd5\x00\x01\x00\x90\x00\x40\x00\x91\x01\x01\x00\x90\x21\x40\x00\x91\x3f\x00\x00\xeb\xc0\x00\x00\x54\xe1\x00\x00\xf0\x21\xec\x47\xf9\x61\x00\x00\xb4\xf0\x03\x01\xaa\x00\x02\x1f\xd6\xc0\x03\x5f\xd6\x00\x01\x00\x90\x00\x40\x00\x91\x01\x01\x00\x90\x21\x40\x00\x91\x21\x00\x00\xcb\x22\xfc\x7f\xd3\x41\x0c\x81\x8b\x21\xfc\x41\x93\xc1\x00\x00\xb4\xe2\x00\x00\xf0\x42\xfc\x47\xf9\x62\x00\x00\xb4\xf0\x03\x02\xaa\x00\x02\x1f\xd6\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\xfd\x7b\xbe\xa9\xfd\x03\x00\x91\xf3\x0b\x00\xf9\x13\x01\x00\x90\x60\x42\x40\x39\x40\x01\x00\x35\xe0\x00\x00\xf0\x00\xf0\x47\xf9\x80\x00\x00\xb4\x00\x01\x00\x90\x00\x04\x40\xf9\xb5\xff\xff\x97\xd8\xff\xff\x97\x20\x00\x80\x52\x60\x42\x00\x39\xf3\x0b\x40\xf9\xfd\x7b\xc2\xa8\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\x1f\x20\x03\xd5\xdc\xff\xff\x17\xfd\x7b\xbc\xa9\xfd\x03\x00\x91\x80\x02\x80\xd2\xac\xff\xff\x97\xe0\x0f\x00\xf9\xe0\x0f\x40\xf9\x81\x01\x80\x52\x01\x00\x00\xb9\xe0\x0f\x40\xf9\xe0\x13\x00\xf9\xe0\x13\x40\xf9\x00\x04\x00\x91\xe0\x17\x00\xf9\x00\x01\x80\xd2\xa1\xff\xff\x97\xe0\x1b\x00\xf9\xe0\x1b\x40\xf9\xe1\x17\x40\xf9\x01\x00\x00\xf9\xe0\x1b\x40\xf9\x00\x00\x40\xf9\xe0\x1f\x00\xf9\x00\x00\x80\x52\xfd\x7b\xc4\xa8\xc0\x03\x5f\xd6")]), +Memmap([Annotation(Region(0x0,0x8BB), Attr("segment","02 0 2236")), +Annotation(Region(0x640,0x673), Attr("symbol","_start")), +Annotation(Region(0x0,0xF9), Attr("section",".shstrtab")), +Annotation(Region(0x0,0x242), Attr("section",".strtab")), +Annotation(Region(0x0,0x827), Attr("section",".symtab")), +Annotation(Region(0x0,0x2A), Attr("section",".comment")), +Annotation(Region(0x238,0x252), Attr("section",".interp")), +Annotation(Region(0x254,0x277), Attr("section",".note.gnu.build-id")), +Annotation(Region(0x278,0x297), Attr("section",".note.ABI-tag")), +Annotation(Region(0x298,0x2B3), Attr("section",".gnu.hash")), +Annotation(Region(0x2B8,0x3A7), Attr("section",".dynsym")), +Annotation(Region(0x3A8,0x43B), Attr("section",".dynstr")), +Annotation(Region(0x43C,0x44F), Attr("section",".gnu.version")), +Annotation(Region(0x450,0x47F), Attr("section",".gnu.version_r")), +Annotation(Region(0x480,0x53F), Attr("section",".rela.dyn")), +Annotation(Region(0x540,0x5B7), Attr("section",".rela.plt")), +Annotation(Region(0x5B8,0x5CF), Attr("section",".init")), +Annotation(Region(0x5D0,0x63F), Attr("section",".plt")), +Annotation(Region(0x5B8,0x5CF), Attr("code-region","()")), +Annotation(Region(0x5D0,0x63F), Attr("code-region","()")), +Annotation(Region(0x640,0x673), Attr("symbol-info","_start 0x640 52")), +Annotation(Region(0x674,0x687), Attr("symbol","call_weak_fn")), +Annotation(Region(0x674,0x687), Attr("symbol-info","call_weak_fn 0x674 20")), +Annotation(Region(0x754,0x7B7), Attr("symbol","main")), +Annotation(Region(0x754,0x7B7), Attr("symbol-info","main 0x754 100")), +Annotation(Region(0x7B8,0x7CB), Attr("section",".fini")), +Annotation(Region(0x7CC,0x7CF), Attr("section",".rodata")), +Annotation(Region(0x7D0,0x80B), Attr("section",".eh_frame_hdr")), +Annotation(Region(0x810,0x8BB), Attr("section",".eh_frame")), +Annotation(Region(0x1FD90,0x2000F), Attr("segment","03 0x1FD90 648")), +Annotation(Region(0x1FD98,0x1FD9F), Attr("section",".fini_array")), +Annotation(Region(0x1FDA0,0x1FF8F), 
Attr("section",".dynamic")), +Annotation(Region(0x1FF90,0x1FFFF), Attr("section",".got")), +Annotation(Region(0x20000,0x2000F), Attr("section",".data")), +Annotation(Region(0x1FD90,0x1FD97), Attr("section",".init_array")), +Annotation(Region(0x640,0x7B7), Attr("section",".text")), +Annotation(Region(0x640,0x7B7), Attr("code-region","()")), +Annotation(Region(0x7B8,0x7CB), Attr("code-region","()"))]), +Program(Tid(1_768, "%000006e8"), Attrs([]), + Subs([Sub(Tid(1_715, "@__cxa_finalize"), Attrs([Attr("address","0x600"), +Attr("stub","()"), Attr("c.proto","signed (*)(void)")]), "__cxa_finalize", + Args([Arg(Tid(1_769, "%000006e9"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("__cxa_finalize_result",Imm(32)), LOW(32,Var("R0",Imm(64))), Out())]), +Blks([Blk(Tid(1_051, "@__cxa_finalize"), Attrs([Attr("address","0x600")]), + Phis([]), Defs([Def(Tid(1_315, "%00000523"), Attrs([Attr("address","0x600"), +Attr("insn","adrp x16, #126976")]), Var("R16",Imm(64)), Int(126976,64)), +Def(Tid(1_322, "%0000052a"), Attrs([Attr("address","0x604"), +Attr("insn","ldr x17, [x16, #0xfb0]")]), Var("R17",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R16",Imm(64)),Int(4016,64)),LittleEndian(),64)), +Def(Tid(1_328, "%00000530"), Attrs([Attr("address","0x608"), +Attr("insn","add x16, x16, #0xfb0")]), Var("R16",Imm(64)), +PLUS(Var("R16",Imm(64)),Int(4016,64)))]), Jmps([Call(Tid(1_333, "%00000535"), + Attrs([Attr("address","0x60C"), Attr("insn","br x17")]), Int(1,1), +(Indirect(Var("R17",Imm(64))),))]))])), +Sub(Tid(1_716, "@__do_global_dtors_aux"), Attrs([Attr("address","0x700"), +Attr("c.proto","signed (*)(void)")]), "__do_global_dtors_aux", + Args([Arg(Tid(1_770, "%000006ea"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("__do_global_dtors_aux_result",Imm(32)), LOW(32,Var("R0",Imm(64))), +Out())]), Blks([Blk(Tid(709, "@__do_global_dtors_aux"), + Attrs([Attr("address","0x700")]), Phis([]), Defs([Def(Tid(713, "%000002c9"), + Attrs([Attr("address","0x700"), +Attr("insn","stp x29, x30, [sp, #-0x20]!")]), Var("#3",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(18446744073709551584,64))), +Def(Tid(719, "%000002cf"), Attrs([Attr("address","0x700"), +Attr("insn","stp x29, x30, [sp, #-0x20]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("#3",Imm(64)),Var("R29",Imm(64)),LittleEndian(),64)), +Def(Tid(725, "%000002d5"), Attrs([Attr("address","0x700"), +Attr("insn","stp x29, x30, [sp, #-0x20]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("#3",Imm(64)),Int(8,64)),Var("R30",Imm(64)),LittleEndian(),64)), +Def(Tid(729, "%000002d9"), Attrs([Attr("address","0x700"), +Attr("insn","stp x29, x30, [sp, #-0x20]!")]), Var("R31",Imm(64)), +Var("#3",Imm(64))), Def(Tid(735, "%000002df"), + Attrs([Attr("address","0x704"), Attr("insn","mov x29, sp")]), + Var("R29",Imm(64)), Var("R31",Imm(64))), Def(Tid(743, "%000002e7"), + Attrs([Attr("address","0x708"), Attr("insn","str x19, [sp, #0x10]")]), + Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(16,64)),Var("R19",Imm(64)),LittleEndian(),64)), +Def(Tid(748, "%000002ec"), Attrs([Attr("address","0x70C"), +Attr("insn","adrp x19, #131072")]), Var("R19",Imm(64)), Int(131072,64)), +Def(Tid(755, "%000002f3"), Attrs([Attr("address","0x710"), +Attr("insn","ldrb w0, [x19, #0x10]")]), Var("R0",Imm(64)), +UNSIGNED(64,Load(Var("mem",Mem(64,8)),PLUS(Var("R19",Imm(64)),Int(16,64)),LittleEndian(),8)))]), +Jmps([Goto(Tid(762, "%000002fa"), 
Attrs([Attr("address","0x714"), +Attr("insn","cbnz w0, #0x28")]), + NEQ(Extract(31,0,Var("R0",Imm(64))),Int(0,32)), +Direct(Tid(760, "%000002f8"))), Goto(Tid(1_758, "%000006de"), Attrs([]), + Int(1,1), Direct(Tid(996, "%000003e4")))])), Blk(Tid(996, "%000003e4"), + Attrs([Attr("address","0x718")]), Phis([]), Defs([Def(Tid(999, "%000003e7"), + Attrs([Attr("address","0x718"), Attr("insn","adrp x0, #126976")]), + Var("R0",Imm(64)), Int(126976,64)), Def(Tid(1_006, "%000003ee"), + Attrs([Attr("address","0x71C"), Attr("insn","ldr x0, [x0, #0xfe0]")]), + Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R0",Imm(64)),Int(4064,64)),LittleEndian(),64))]), +Jmps([Goto(Tid(1_012, "%000003f4"), Attrs([Attr("address","0x720"), +Attr("insn","cbz x0, #0x10")]), EQ(Var("R0",Imm(64)),Int(0,64)), +Direct(Tid(1_010, "%000003f2"))), Goto(Tid(1_759, "%000006df"), Attrs([]), + Int(1,1), Direct(Tid(1_035, "%0000040b")))])), Blk(Tid(1_035, "%0000040b"), + Attrs([Attr("address","0x724")]), Phis([]), +Defs([Def(Tid(1_038, "%0000040e"), Attrs([Attr("address","0x724"), +Attr("insn","adrp x0, #131072")]), Var("R0",Imm(64)), Int(131072,64)), +Def(Tid(1_045, "%00000415"), Attrs([Attr("address","0x728"), +Attr("insn","ldr x0, [x0, #0x8]")]), Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R0",Imm(64)),Int(8,64)),LittleEndian(),64)), +Def(Tid(1_050, "%0000041a"), Attrs([Attr("address","0x72C"), +Attr("insn","bl #-0x12c")]), Var("R30",Imm(64)), Int(1840,64))]), +Jmps([Call(Tid(1_053, "%0000041d"), Attrs([Attr("address","0x72C"), +Attr("insn","bl #-0x12c")]), Int(1,1), +(Direct(Tid(1_715, "@__cxa_finalize")),Direct(Tid(1_010, "%000003f2"))))])), +Blk(Tid(1_010, "%000003f2"), Attrs([Attr("address","0x730")]), Phis([]), +Defs([Def(Tid(1_018, "%000003fa"), Attrs([Attr("address","0x730"), +Attr("insn","bl #-0xa0")]), Var("R30",Imm(64)), Int(1844,64))]), +Jmps([Call(Tid(1_020, "%000003fc"), Attrs([Attr("address","0x730"), +Attr("insn","bl #-0xa0")]), Int(1,1), +(Direct(Tid(1_729, "@deregister_tm_clones")),Direct(Tid(1_022, "%000003fe"))))])), +Blk(Tid(1_022, "%000003fe"), Attrs([Attr("address","0x734")]), Phis([]), +Defs([Def(Tid(1_025, "%00000401"), Attrs([Attr("address","0x734"), +Attr("insn","mov w0, #0x1")]), Var("R0",Imm(64)), Int(1,64)), +Def(Tid(1_033, "%00000409"), Attrs([Attr("address","0x738"), +Attr("insn","strb w0, [x19, #0x10]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("R19",Imm(64)),Int(16,64)),Extract(7,0,Var("R0",Imm(64))),LittleEndian(),8))]), +Jmps([Goto(Tid(1_760, "%000006e0"), Attrs([]), Int(1,1), +Direct(Tid(760, "%000002f8")))])), Blk(Tid(760, "%000002f8"), + Attrs([Attr("address","0x73C")]), Phis([]), Defs([Def(Tid(770, "%00000302"), + Attrs([Attr("address","0x73C"), Attr("insn","ldr x19, [sp, #0x10]")]), + Var("R19",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(16,64)),LittleEndian(),64)), +Def(Tid(777, "%00000309"), Attrs([Attr("address","0x740"), +Attr("insn","ldp x29, x30, [sp], #0x20")]), Var("R29",Imm(64)), +Load(Var("mem",Mem(64,8)),Var("R31",Imm(64)),LittleEndian(),64)), +Def(Tid(782, "%0000030e"), Attrs([Attr("address","0x740"), +Attr("insn","ldp x29, x30, [sp], #0x20")]), Var("R30",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(8,64)),LittleEndian(),64)), +Def(Tid(786, "%00000312"), Attrs([Attr("address","0x740"), +Attr("insn","ldp x29, x30, [sp], #0x20")]), Var("R31",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(32,64)))]), Jmps([Call(Tid(791, "%00000317"), + Attrs([Attr("address","0x744"), Attr("insn","ret")]), Int(1,1), 
+(Indirect(Var("R30",Imm(64))),))]))])), Sub(Tid(1_720, "@__libc_start_main"), + Attrs([Attr("address","0x5F0"), Attr("stub","()"), +Attr("c.proto","signed (*)(signed (*)(signed , char** , char** );* main, signed , char** , \nvoid* auxv)")]), + "__libc_start_main", Args([Arg(Tid(1_771, "%000006eb"), + Attrs([Attr("c.data","Top:u64 ptr ptr"), +Attr("c.layout","**[ : 64]"), +Attr("c.type","signed (*)(signed , char** , char** );*")]), + Var("__libc_start_main_main",Imm(64)), Var("R0",Imm(64)), In()), +Arg(Tid(1_772, "%000006ec"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("__libc_start_main_arg2",Imm(32)), LOW(32,Var("R1",Imm(64))), In()), +Arg(Tid(1_773, "%000006ed"), Attrs([Attr("c.data","Top:u8 ptr ptr"), +Attr("c.layout","**[char : 8]"), Attr("c.type","char**")]), + Var("__libc_start_main_arg3",Imm(64)), Var("R2",Imm(64)), Both()), +Arg(Tid(1_774, "%000006ee"), Attrs([Attr("c.data","{} ptr"), +Attr("c.layout","*[ : 8]"), Attr("c.type","void*")]), + Var("__libc_start_main_auxv",Imm(64)), Var("R3",Imm(64)), Both()), +Arg(Tid(1_775, "%000006ef"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("__libc_start_main_result",Imm(32)), LOW(32,Var("R0",Imm(64))), +Out())]), Blks([Blk(Tid(542, "@__libc_start_main"), + Attrs([Attr("address","0x5F0")]), Phis([]), +Defs([Def(Tid(1_293, "%0000050d"), Attrs([Attr("address","0x5F0"), +Attr("insn","adrp x16, #126976")]), Var("R16",Imm(64)), Int(126976,64)), +Def(Tid(1_300, "%00000514"), Attrs([Attr("address","0x5F4"), +Attr("insn","ldr x17, [x16, #0xfa8]")]), Var("R17",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R16",Imm(64)),Int(4008,64)),LittleEndian(),64)), +Def(Tid(1_306, "%0000051a"), Attrs([Attr("address","0x5F8"), +Attr("insn","add x16, x16, #0xfa8")]), Var("R16",Imm(64)), +PLUS(Var("R16",Imm(64)),Int(4008,64)))]), Jmps([Call(Tid(1_311, "%0000051f"), + Attrs([Attr("address","0x5FC"), Attr("insn","br x17")]), Int(1,1), +(Indirect(Var("R17",Imm(64))),))]))])), Sub(Tid(1_721, "@_fini"), + Attrs([Attr("address","0x7B8"), Attr("c.proto","signed (*)(void)")]), + "_fini", Args([Arg(Tid(1_776, "%000006f0"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("_fini_result",Imm(32)), LOW(32,Var("R0",Imm(64))), Out())]), +Blks([Blk(Tid(50, "@_fini"), Attrs([Attr("address","0x7B8")]), Phis([]), +Defs([Def(Tid(56, "%00000038"), Attrs([Attr("address","0x7BC"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("#0",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(18446744073709551600,64))), +Def(Tid(62, "%0000003e"), Attrs([Attr("address","0x7BC"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("#0",Imm(64)),Var("R29",Imm(64)),LittleEndian(),64)), +Def(Tid(68, "%00000044"), Attrs([Attr("address","0x7BC"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("#0",Imm(64)),Int(8,64)),Var("R30",Imm(64)),LittleEndian(),64)), +Def(Tid(72, "%00000048"), Attrs([Attr("address","0x7BC"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("R31",Imm(64)), +Var("#0",Imm(64))), Def(Tid(78, "%0000004e"), Attrs([Attr("address","0x7C0"), +Attr("insn","mov x29, sp")]), Var("R29",Imm(64)), Var("R31",Imm(64))), +Def(Tid(85, "%00000055"), Attrs([Attr("address","0x7C4"), +Attr("insn","ldp x29, x30, [sp], #0x10")]), Var("R29",Imm(64)), +Load(Var("mem",Mem(64,8)),Var("R31",Imm(64)),LittleEndian(),64)), +Def(Tid(90, "%0000005a"), 
Attrs([Attr("address","0x7C4"), +Attr("insn","ldp x29, x30, [sp], #0x10")]), Var("R30",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(8,64)),LittleEndian(),64)), +Def(Tid(94, "%0000005e"), Attrs([Attr("address","0x7C4"), +Attr("insn","ldp x29, x30, [sp], #0x10")]), Var("R31",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(16,64)))]), Jmps([Call(Tid(99, "%00000063"), + Attrs([Attr("address","0x7C8"), Attr("insn","ret")]), Int(1,1), +(Indirect(Var("R30",Imm(64))),))]))])), Sub(Tid(1_722, "@_init"), + Attrs([Attr("address","0x5B8"), Attr("c.proto","signed (*)(void)")]), + "_init", Args([Arg(Tid(1_777, "%000006f1"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("_init_result",Imm(32)), LOW(32,Var("R0",Imm(64))), Out())]), +Blks([Blk(Tid(1_528, "@_init"), Attrs([Attr("address","0x5B8")]), Phis([]), +Defs([Def(Tid(1_534, "%000005fe"), Attrs([Attr("address","0x5BC"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("#6",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(18446744073709551600,64))), +Def(Tid(1_540, "%00000604"), Attrs([Attr("address","0x5BC"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("#6",Imm(64)),Var("R29",Imm(64)),LittleEndian(),64)), +Def(Tid(1_546, "%0000060a"), Attrs([Attr("address","0x5BC"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("#6",Imm(64)),Int(8,64)),Var("R30",Imm(64)),LittleEndian(),64)), +Def(Tid(1_550, "%0000060e"), Attrs([Attr("address","0x5BC"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("R31",Imm(64)), +Var("#6",Imm(64))), Def(Tid(1_556, "%00000614"), + Attrs([Attr("address","0x5C0"), Attr("insn","mov x29, sp")]), + Var("R29",Imm(64)), Var("R31",Imm(64))), Def(Tid(1_561, "%00000619"), + Attrs([Attr("address","0x5C4"), Attr("insn","bl #0xb0")]), + Var("R30",Imm(64)), Int(1480,64))]), Jmps([Call(Tid(1_563, "%0000061b"), + Attrs([Attr("address","0x5C4"), Attr("insn","bl #0xb0")]), Int(1,1), +(Direct(Tid(1_727, "@call_weak_fn")),Direct(Tid(1_565, "%0000061d"))))])), +Blk(Tid(1_565, "%0000061d"), Attrs([Attr("address","0x5C8")]), Phis([]), +Defs([Def(Tid(1_570, "%00000622"), Attrs([Attr("address","0x5C8"), +Attr("insn","ldp x29, x30, [sp], #0x10")]), Var("R29",Imm(64)), +Load(Var("mem",Mem(64,8)),Var("R31",Imm(64)),LittleEndian(),64)), +Def(Tid(1_575, "%00000627"), Attrs([Attr("address","0x5C8"), +Attr("insn","ldp x29, x30, [sp], #0x10")]), Var("R30",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(8,64)),LittleEndian(),64)), +Def(Tid(1_579, "%0000062b"), Attrs([Attr("address","0x5C8"), +Attr("insn","ldp x29, x30, [sp], #0x10")]), Var("R31",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(16,64)))]), Jmps([Call(Tid(1_584, "%00000630"), + Attrs([Attr("address","0x5CC"), Attr("insn","ret")]), Int(1,1), +(Indirect(Var("R30",Imm(64))),))]))])), Sub(Tid(1_723, "@_start"), + Attrs([Attr("address","0x640"), Attr("stub","()"), Attr("entry-point","()"), +Attr("c.proto","signed (*)(void)")]), "_start", + Args([Arg(Tid(1_778, "%000006f2"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("_start_result",Imm(32)), LOW(32,Var("R0",Imm(64))), Out())]), +Blks([Blk(Tid(479, "@_start"), Attrs([Attr("address","0x640")]), Phis([]), +Defs([Def(Tid(484, "%000001e4"), Attrs([Attr("address","0x644"), +Attr("insn","mov x29, #0x0")]), Var("R29",Imm(64)), Int(0,64)), +Def(Tid(489, "%000001e9"), Attrs([Attr("address","0x648"), +Attr("insn","mov x30, 
#0x0")]), Var("R30",Imm(64)), Int(0,64)), +Def(Tid(495, "%000001ef"), Attrs([Attr("address","0x64C"), +Attr("insn","mov x5, x0")]), Var("R5",Imm(64)), Var("R0",Imm(64))), +Def(Tid(502, "%000001f6"), Attrs([Attr("address","0x650"), +Attr("insn","ldr x1, [sp]")]), Var("R1",Imm(64)), +Load(Var("mem",Mem(64,8)),Var("R31",Imm(64)),LittleEndian(),64)), +Def(Tid(508, "%000001fc"), Attrs([Attr("address","0x654"), +Attr("insn","add x2, sp, #0x8")]), Var("R2",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(8,64))), Def(Tid(514, "%00000202"), + Attrs([Attr("address","0x658"), Attr("insn","mov x6, sp")]), + Var("R6",Imm(64)), Var("R31",Imm(64))), Def(Tid(519, "%00000207"), + Attrs([Attr("address","0x65C"), Attr("insn","adrp x0, #126976")]), + Var("R0",Imm(64)), Int(126976,64)), Def(Tid(526, "%0000020e"), + Attrs([Attr("address","0x660"), Attr("insn","ldr x0, [x0, #0xff0]")]), + Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R0",Imm(64)),Int(4080,64)),LittleEndian(),64)), +Def(Tid(531, "%00000213"), Attrs([Attr("address","0x664"), +Attr("insn","mov x3, #0x0")]), Var("R3",Imm(64)), Int(0,64)), +Def(Tid(536, "%00000218"), Attrs([Attr("address","0x668"), +Attr("insn","mov x4, #0x0")]), Var("R4",Imm(64)), Int(0,64)), +Def(Tid(541, "%0000021d"), Attrs([Attr("address","0x66C"), +Attr("insn","bl #-0x7c")]), Var("R30",Imm(64)), Int(1648,64))]), +Jmps([Call(Tid(544, "%00000220"), Attrs([Attr("address","0x66C"), +Attr("insn","bl #-0x7c")]), Int(1,1), +(Direct(Tid(1_720, "@__libc_start_main")),Direct(Tid(546, "%00000222"))))])), +Blk(Tid(546, "%00000222"), Attrs([Attr("address","0x670")]), Phis([]), +Defs([Def(Tid(549, "%00000225"), Attrs([Attr("address","0x670"), +Attr("insn","bl #-0x40")]), Var("R30",Imm(64)), Int(1652,64))]), +Jmps([Call(Tid(552, "%00000228"), Attrs([Attr("address","0x670"), +Attr("insn","bl #-0x40")]), Int(1,1), +(Direct(Tid(1_726, "@abort")),Direct(Tid(1_761, "%000006e1"))))])), +Blk(Tid(1_761, "%000006e1"), Attrs([]), Phis([]), Defs([]), +Jmps([Call(Tid(1_762, "%000006e2"), Attrs([]), Int(1,1), +(Direct(Tid(1_727, "@call_weak_fn")),))]))])), Sub(Tid(1_726, "@abort"), + Attrs([Attr("address","0x630"), Attr("stub","()"), Attr("noreturn","()"), +Attr("c.proto","void (*)(void)")]), "abort", Args([]), +Blks([Blk(Tid(550, "@abort"), Attrs([Attr("address","0x630")]), Phis([]), +Defs([Def(Tid(1_381, "%00000565"), Attrs([Attr("address","0x630"), +Attr("insn","adrp x16, #126976")]), Var("R16",Imm(64)), Int(126976,64)), +Def(Tid(1_388, "%0000056c"), Attrs([Attr("address","0x634"), +Attr("insn","ldr x17, [x16, #0xfc8]")]), Var("R17",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R16",Imm(64)),Int(4040,64)),LittleEndian(),64)), +Def(Tid(1_394, "%00000572"), Attrs([Attr("address","0x638"), +Attr("insn","add x16, x16, #0xfc8")]), Var("R16",Imm(64)), +PLUS(Var("R16",Imm(64)),Int(4040,64)))]), Jmps([Call(Tid(1_399, "%00000577"), + Attrs([Attr("address","0x63C"), Attr("insn","br x17")]), Int(1,1), +(Indirect(Var("R17",Imm(64))),))]))])), Sub(Tid(1_727, "@call_weak_fn"), + Attrs([Attr("address","0x674"), Attr("c.proto","signed (*)(void)")]), + "call_weak_fn", Args([Arg(Tid(1_779, "%000006f3"), + Attrs([Attr("c.data","Top:u32"), Attr("c.layout","[signed : 32]"), +Attr("c.type","signed")]), Var("call_weak_fn_result",Imm(32)), +LOW(32,Var("R0",Imm(64))), Out())]), Blks([Blk(Tid(554, "@call_weak_fn"), + Attrs([Attr("address","0x674")]), Phis([]), Defs([Def(Tid(557, "%0000022d"), + Attrs([Attr("address","0x674"), Attr("insn","adrp x0, #126976")]), + Var("R0",Imm(64)), Int(126976,64)), Def(Tid(564, "%00000234"), + 
Attrs([Attr("address","0x678"), Attr("insn","ldr x0, [x0, #0xfe8]")]), + Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R0",Imm(64)),Int(4072,64)),LittleEndian(),64))]), +Jmps([Goto(Tid(570, "%0000023a"), Attrs([Attr("address","0x67C"), +Attr("insn","cbz x0, #0x8")]), EQ(Var("R0",Imm(64)),Int(0,64)), +Direct(Tid(568, "%00000238"))), Goto(Tid(1_763, "%000006e3"), Attrs([]), + Int(1,1), Direct(Tid(1_115, "%0000045b")))])), Blk(Tid(568, "%00000238"), + Attrs([Attr("address","0x684")]), Phis([]), Defs([]), +Jmps([Call(Tid(576, "%00000240"), Attrs([Attr("address","0x684"), +Attr("insn","ret")]), Int(1,1), (Indirect(Var("R30",Imm(64))),))])), +Blk(Tid(1_115, "%0000045b"), Attrs([Attr("address","0x680")]), Phis([]), +Defs([]), Jmps([Goto(Tid(1_118, "%0000045e"), Attrs([Attr("address","0x680"), +Attr("insn","b #-0x60")]), Int(1,1), +Direct(Tid(1_116, "@__gmon_start__")))])), Blk(Tid(1_116, "@__gmon_start__"), + Attrs([Attr("address","0x620")]), Phis([]), +Defs([Def(Tid(1_359, "%0000054f"), Attrs([Attr("address","0x620"), +Attr("insn","adrp x16, #126976")]), Var("R16",Imm(64)), Int(126976,64)), +Def(Tid(1_366, "%00000556"), Attrs([Attr("address","0x624"), +Attr("insn","ldr x17, [x16, #0xfc0]")]), Var("R17",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R16",Imm(64)),Int(4032,64)),LittleEndian(),64)), +Def(Tid(1_372, "%0000055c"), Attrs([Attr("address","0x628"), +Attr("insn","add x16, x16, #0xfc0")]), Var("R16",Imm(64)), +PLUS(Var("R16",Imm(64)),Int(4032,64)))]), Jmps([Call(Tid(1_377, "%00000561"), + Attrs([Attr("address","0x62C"), Attr("insn","br x17")]), Int(1,1), +(Indirect(Var("R17",Imm(64))),))]))])), +Sub(Tid(1_729, "@deregister_tm_clones"), Attrs([Attr("address","0x690"), +Attr("c.proto","signed (*)(void)")]), "deregister_tm_clones", + Args([Arg(Tid(1_780, "%000006f4"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("deregister_tm_clones_result",Imm(32)), LOW(32,Var("R0",Imm(64))), +Out())]), Blks([Blk(Tid(582, "@deregister_tm_clones"), + Attrs([Attr("address","0x690")]), Phis([]), Defs([Def(Tid(585, "%00000249"), + Attrs([Attr("address","0x690"), Attr("insn","adrp x0, #131072")]), + Var("R0",Imm(64)), Int(131072,64)), Def(Tid(591, "%0000024f"), + Attrs([Attr("address","0x694"), Attr("insn","add x0, x0, #0x10")]), + Var("R0",Imm(64)), PLUS(Var("R0",Imm(64)),Int(16,64))), +Def(Tid(596, "%00000254"), Attrs([Attr("address","0x698"), +Attr("insn","adrp x1, #131072")]), Var("R1",Imm(64)), Int(131072,64)), +Def(Tid(602, "%0000025a"), Attrs([Attr("address","0x69C"), +Attr("insn","add x1, x1, #0x10")]), Var("R1",Imm(64)), +PLUS(Var("R1",Imm(64)),Int(16,64))), Def(Tid(608, "%00000260"), + Attrs([Attr("address","0x6A0"), Attr("insn","cmp x1, x0")]), + Var("#1",Imm(64)), NOT(Var("R0",Imm(64)))), Def(Tid(613, "%00000265"), + Attrs([Attr("address","0x6A0"), Attr("insn","cmp x1, x0")]), + Var("#2",Imm(64)), PLUS(Var("R1",Imm(64)),NOT(Var("R0",Imm(64))))), +Def(Tid(619, "%0000026b"), Attrs([Attr("address","0x6A0"), +Attr("insn","cmp x1, x0")]), Var("VF",Imm(1)), +NEQ(SIGNED(65,PLUS(Var("#2",Imm(64)),Int(1,64))),PLUS(PLUS(SIGNED(65,Var("R1",Imm(64))),SIGNED(65,Var("#1",Imm(64)))),Int(1,65)))), +Def(Tid(625, "%00000271"), Attrs([Attr("address","0x6A0"), +Attr("insn","cmp x1, x0")]), Var("CF",Imm(1)), +NEQ(UNSIGNED(65,PLUS(Var("#2",Imm(64)),Int(1,64))),PLUS(PLUS(UNSIGNED(65,Var("R1",Imm(64))),UNSIGNED(65,Var("#1",Imm(64)))),Int(1,65)))), +Def(Tid(629, "%00000275"), Attrs([Attr("address","0x6A0"), +Attr("insn","cmp x1, x0")]), Var("ZF",Imm(1)), 
+EQ(PLUS(Var("#2",Imm(64)),Int(1,64)),Int(0,64))), Def(Tid(633, "%00000279"), + Attrs([Attr("address","0x6A0"), Attr("insn","cmp x1, x0")]), + Var("NF",Imm(1)), Extract(63,63,PLUS(Var("#2",Imm(64)),Int(1,64))))]), +Jmps([Goto(Tid(639, "%0000027f"), Attrs([Attr("address","0x6A4"), +Attr("insn","b.eq #0x18")]), EQ(Var("ZF",Imm(1)),Int(1,1)), +Direct(Tid(637, "%0000027d"))), Goto(Tid(1_764, "%000006e4"), Attrs([]), + Int(1,1), Direct(Tid(1_085, "%0000043d")))])), Blk(Tid(1_085, "%0000043d"), + Attrs([Attr("address","0x6A8")]), Phis([]), +Defs([Def(Tid(1_088, "%00000440"), Attrs([Attr("address","0x6A8"), +Attr("insn","adrp x1, #126976")]), Var("R1",Imm(64)), Int(126976,64)), +Def(Tid(1_095, "%00000447"), Attrs([Attr("address","0x6AC"), +Attr("insn","ldr x1, [x1, #0xfd8]")]), Var("R1",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R1",Imm(64)),Int(4056,64)),LittleEndian(),64))]), +Jmps([Goto(Tid(1_100, "%0000044c"), Attrs([Attr("address","0x6B0"), +Attr("insn","cbz x1, #0xc")]), EQ(Var("R1",Imm(64)),Int(0,64)), +Direct(Tid(637, "%0000027d"))), Goto(Tid(1_765, "%000006e5"), Attrs([]), + Int(1,1), Direct(Tid(1_104, "%00000450")))])), Blk(Tid(637, "%0000027d"), + Attrs([Attr("address","0x6BC")]), Phis([]), Defs([]), +Jmps([Call(Tid(645, "%00000285"), Attrs([Attr("address","0x6BC"), +Attr("insn","ret")]), Int(1,1), (Indirect(Var("R30",Imm(64))),))])), +Blk(Tid(1_104, "%00000450"), Attrs([Attr("address","0x6B4")]), Phis([]), +Defs([Def(Tid(1_108, "%00000454"), Attrs([Attr("address","0x6B4"), +Attr("insn","mov x16, x1")]), Var("R16",Imm(64)), Var("R1",Imm(64)))]), +Jmps([Call(Tid(1_113, "%00000459"), Attrs([Attr("address","0x6B8"), +Attr("insn","br x16")]), Int(1,1), (Indirect(Var("R16",Imm(64))),))]))])), +Sub(Tid(1_732, "@frame_dummy"), Attrs([Attr("address","0x750"), +Attr("c.proto","signed (*)(void)")]), "frame_dummy", + Args([Arg(Tid(1_781, "%000006f5"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("frame_dummy_result",Imm(32)), LOW(32,Var("R0",Imm(64))), Out())]), +Blks([Blk(Tid(797, "@frame_dummy"), Attrs([Attr("address","0x750")]), + Phis([]), Defs([]), Jmps([Call(Tid(799, "%0000031f"), + Attrs([Attr("address","0x750"), Attr("insn","b #-0x90")]), Int(1,1), +(Direct(Tid(1_735, "@register_tm_clones")),))]))])), Sub(Tid(1_733, "@main"), + Attrs([Attr("address","0x754"), +Attr("c.proto","signed (*)(signed argc, const char** argv)")]), "main", + Args([Arg(Tid(1_782, "%000006f6"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("main_argc",Imm(32)), LOW(32,Var("R0",Imm(64))), In()), +Arg(Tid(1_783, "%000006f7"), Attrs([Attr("c.data","Top:u8 ptr ptr"), +Attr("c.layout","**[char : 8]"), Attr("c.type"," const char**")]), + Var("main_argv",Imm(64)), Var("R1",Imm(64)), Both()), +Arg(Tid(1_784, "%000006f8"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + Var("main_result",Imm(32)), LOW(32,Var("R0",Imm(64))), Out())]), +Blks([Blk(Tid(801, "@main"), Attrs([Attr("address","0x754")]), Phis([]), +Defs([Def(Tid(805, "%00000325"), Attrs([Attr("address","0x754"), +Attr("insn","stp x29, x30, [sp, #-0x40]!")]), Var("#4",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(18446744073709551552,64))), +Def(Tid(811, "%0000032b"), Attrs([Attr("address","0x754"), +Attr("insn","stp x29, x30, [sp, #-0x40]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("#4",Imm(64)),Var("R29",Imm(64)),LittleEndian(),64)), +Def(Tid(817, "%00000331"), Attrs([Attr("address","0x754"), 
+Attr("insn","stp x29, x30, [sp, #-0x40]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("#4",Imm(64)),Int(8,64)),Var("R30",Imm(64)),LittleEndian(),64)), +Def(Tid(821, "%00000335"), Attrs([Attr("address","0x754"), +Attr("insn","stp x29, x30, [sp, #-0x40]!")]), Var("R31",Imm(64)), +Var("#4",Imm(64))), Def(Tid(827, "%0000033b"), + Attrs([Attr("address","0x758"), Attr("insn","mov x29, sp")]), + Var("R29",Imm(64)), Var("R31",Imm(64))), Def(Tid(832, "%00000340"), + Attrs([Attr("address","0x75C"), Attr("insn","mov x0, #0x14")]), + Var("R0",Imm(64)), Int(20,64)), Def(Tid(837, "%00000345"), + Attrs([Attr("address","0x760"), Attr("insn","bl #-0x150")]), + Var("R30",Imm(64)), Int(1892,64))]), Jmps([Call(Tid(840, "%00000348"), + Attrs([Attr("address","0x760"), Attr("insn","bl #-0x150")]), Int(1,1), +(Direct(Tid(1_734, "@malloc")),Direct(Tid(842, "%0000034a"))))])), +Blk(Tid(842, "%0000034a"), Attrs([Attr("address","0x764")]), Phis([]), +Defs([Def(Tid(848, "%00000350"), Attrs([Attr("address","0x764"), +Attr("insn","str x0, [sp, #0x18]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(24,64)),Var("R0",Imm(64)),LittleEndian(),64)), +Def(Tid(855, "%00000357"), Attrs([Attr("address","0x768"), +Attr("insn","ldr x0, [sp, #0x18]")]), Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(24,64)),LittleEndian(),64)), +Def(Tid(860, "%0000035c"), Attrs([Attr("address","0x76C"), +Attr("insn","mov w1, #0xc")]), Var("R1",Imm(64)), Int(12,64)), +Def(Tid(868, "%00000364"), Attrs([Attr("address","0x770"), +Attr("insn","str w1, [x0]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("R0",Imm(64)),Extract(31,0,Var("R1",Imm(64))),LittleEndian(),32)), +Def(Tid(875, "%0000036b"), Attrs([Attr("address","0x774"), +Attr("insn","ldr x0, [sp, #0x18]")]), Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(24,64)),LittleEndian(),64)), +Def(Tid(883, "%00000373"), Attrs([Attr("address","0x778"), +Attr("insn","str x0, [sp, #0x20]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(32,64)),Var("R0",Imm(64)),LittleEndian(),64)), +Def(Tid(890, "%0000037a"), Attrs([Attr("address","0x77C"), +Attr("insn","ldr x0, [sp, #0x20]")]), Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(32,64)),LittleEndian(),64)), +Def(Tid(896, "%00000380"), Attrs([Attr("address","0x780"), +Attr("insn","add x0, x0, #0x1")]), Var("R0",Imm(64)), +PLUS(Var("R0",Imm(64)),Int(1,64))), Def(Tid(904, "%00000388"), + Attrs([Attr("address","0x784"), Attr("insn","str x0, [sp, #0x28]")]), + Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(40,64)),Var("R0",Imm(64)),LittleEndian(),64)), +Def(Tid(909, "%0000038d"), Attrs([Attr("address","0x788"), +Attr("insn","mov x0, #0x8")]), Var("R0",Imm(64)), Int(8,64)), +Def(Tid(914, "%00000392"), Attrs([Attr("address","0x78C"), +Attr("insn","bl #-0x17c")]), Var("R30",Imm(64)), Int(1936,64))]), +Jmps([Call(Tid(916, "%00000394"), Attrs([Attr("address","0x78C"), +Attr("insn","bl #-0x17c")]), Int(1,1), +(Direct(Tid(1_734, "@malloc")),Direct(Tid(918, "%00000396"))))])), +Blk(Tid(918, "%00000396"), Attrs([Attr("address","0x790")]), Phis([]), +Defs([Def(Tid(924, "%0000039c"), Attrs([Attr("address","0x790"), +Attr("insn","str x0, [sp, #0x30]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(48,64)),Var("R0",Imm(64)),LittleEndian(),64)), +Def(Tid(931, "%000003a3"), Attrs([Attr("address","0x794"), +Attr("insn","ldr x0, [sp, 
#0x30]")]), Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(48,64)),LittleEndian(),64)), +Def(Tid(938, "%000003aa"), Attrs([Attr("address","0x798"), +Attr("insn","ldr x1, [sp, #0x28]")]), Var("R1",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(40,64)),LittleEndian(),64)), +Def(Tid(946, "%000003b2"), Attrs([Attr("address","0x79C"), +Attr("insn","str x1, [x0]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("R0",Imm(64)),Var("R1",Imm(64)),LittleEndian(),64)), +Def(Tid(953, "%000003b9"), Attrs([Attr("address","0x7A0"), +Attr("insn","ldr x0, [sp, #0x30]")]), Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(48,64)),LittleEndian(),64)), +Def(Tid(960, "%000003c0"), Attrs([Attr("address","0x7A4"), +Attr("insn","ldr x0, [x0]")]), Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),Var("R0",Imm(64)),LittleEndian(),64)), +Def(Tid(968, "%000003c8"), Attrs([Attr("address","0x7A8"), +Attr("insn","str x0, [sp, #0x38]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(56,64)),Var("R0",Imm(64)),LittleEndian(),64)), +Def(Tid(973, "%000003cd"), Attrs([Attr("address","0x7AC"), +Attr("insn","mov w0, #0x0")]), Var("R0",Imm(64)), Int(0,64)), +Def(Tid(980, "%000003d4"), Attrs([Attr("address","0x7B0"), +Attr("insn","ldp x29, x30, [sp], #0x40")]), Var("R29",Imm(64)), +Load(Var("mem",Mem(64,8)),Var("R31",Imm(64)),LittleEndian(),64)), +Def(Tid(985, "%000003d9"), Attrs([Attr("address","0x7B0"), +Attr("insn","ldp x29, x30, [sp], #0x40")]), Var("R30",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(8,64)),LittleEndian(),64)), +Def(Tid(989, "%000003dd"), Attrs([Attr("address","0x7B0"), +Attr("insn","ldp x29, x30, [sp], #0x40")]), Var("R31",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(64,64)))]), Jmps([Call(Tid(994, "%000003e2"), + Attrs([Attr("address","0x7B4"), Attr("insn","ret")]), Int(1,1), +(Indirect(Var("R30",Imm(64))),))]))])), Sub(Tid(1_734, "@malloc"), + Attrs([Attr("address","0x610"), Attr("stub","()"), Attr("malloc","()"), +Attr("c.proto","void* (*)(unsigned long size)")]), "malloc", + Args([Arg(Tid(1_785, "%000006f9"), Attrs([Attr("alloc-size","()"), +Attr("c.data","Top:u64"), Attr("c.layout","[unsigned long : 64]"), +Attr("c.type","unsigned long")]), Var("malloc_size",Imm(64)), +Var("R0",Imm(64)), In()), Arg(Tid(1_786, "%000006fa"), + Attrs([Attr("warn-unused","()"), Attr("c.data","{} ptr"), +Attr("c.layout","*[ : 8]"), Attr("c.type","void*")]), + Var("malloc_result",Imm(64)), Var("R0",Imm(64)), Out())]), +Blks([Blk(Tid(838, "@malloc"), Attrs([Attr("address","0x610")]), Phis([]), +Defs([Def(Tid(1_337, "%00000539"), Attrs([Attr("address","0x610"), +Attr("insn","adrp x16, #126976")]), Var("R16",Imm(64)), Int(126976,64)), +Def(Tid(1_344, "%00000540"), Attrs([Attr("address","0x614"), +Attr("insn","ldr x17, [x16, #0xfb8]")]), Var("R17",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R16",Imm(64)),Int(4024,64)),LittleEndian(),64)), +Def(Tid(1_350, "%00000546"), Attrs([Attr("address","0x618"), +Attr("insn","add x16, x16, #0xfb8")]), Var("R16",Imm(64)), +PLUS(Var("R16",Imm(64)),Int(4024,64)))]), Jmps([Call(Tid(1_355, "%0000054b"), + Attrs([Attr("address","0x61C"), Attr("insn","br x17")]), Int(1,1), +(Indirect(Var("R17",Imm(64))),))]))])), +Sub(Tid(1_735, "@register_tm_clones"), Attrs([Attr("address","0x6C0"), +Attr("c.proto","signed (*)(void)")]), "register_tm_clones", + Args([Arg(Tid(1_787, "%000006fb"), Attrs([Attr("c.data","Top:u32"), +Attr("c.layout","[signed : 32]"), Attr("c.type","signed")]), + 
Var("register_tm_clones_result",Imm(32)), LOW(32,Var("R0",Imm(64))), +Out())]), Blks([Blk(Tid(647, "@register_tm_clones"), + Attrs([Attr("address","0x6C0")]), Phis([]), Defs([Def(Tid(650, "%0000028a"), + Attrs([Attr("address","0x6C0"), Attr("insn","adrp x0, #131072")]), + Var("R0",Imm(64)), Int(131072,64)), Def(Tid(656, "%00000290"), + Attrs([Attr("address","0x6C4"), Attr("insn","add x0, x0, #0x10")]), + Var("R0",Imm(64)), PLUS(Var("R0",Imm(64)),Int(16,64))), +Def(Tid(661, "%00000295"), Attrs([Attr("address","0x6C8"), +Attr("insn","adrp x1, #131072")]), Var("R1",Imm(64)), Int(131072,64)), +Def(Tid(667, "%0000029b"), Attrs([Attr("address","0x6CC"), +Attr("insn","add x1, x1, #0x10")]), Var("R1",Imm(64)), +PLUS(Var("R1",Imm(64)),Int(16,64))), Def(Tid(674, "%000002a2"), + Attrs([Attr("address","0x6D0"), Attr("insn","sub x1, x1, x0")]), + Var("R1",Imm(64)), +PLUS(PLUS(Var("R1",Imm(64)),NOT(Var("R0",Imm(64)))),Int(1,64))), +Def(Tid(680, "%000002a8"), Attrs([Attr("address","0x6D4"), +Attr("insn","lsr x2, x1, #63")]), Var("R2",Imm(64)), +Concat(Int(0,63),Extract(63,63,Var("R1",Imm(64))))), +Def(Tid(687, "%000002af"), Attrs([Attr("address","0x6D8"), +Attr("insn","add x1, x2, x1, asr #3")]), Var("R1",Imm(64)), +PLUS(Var("R2",Imm(64)),ARSHIFT(Var("R1",Imm(64)),Int(3,3)))), +Def(Tid(693, "%000002b5"), Attrs([Attr("address","0x6DC"), +Attr("insn","asr x1, x1, #1")]), Var("R1",Imm(64)), +SIGNED(64,Extract(63,1,Var("R1",Imm(64)))))]), +Jmps([Goto(Tid(699, "%000002bb"), Attrs([Attr("address","0x6E0"), +Attr("insn","cbz x1, #0x18")]), EQ(Var("R1",Imm(64)),Int(0,64)), +Direct(Tid(697, "%000002b9"))), Goto(Tid(1_766, "%000006e6"), Attrs([]), + Int(1,1), Direct(Tid(1_055, "%0000041f")))])), Blk(Tid(1_055, "%0000041f"), + Attrs([Attr("address","0x6E4")]), Phis([]), +Defs([Def(Tid(1_058, "%00000422"), Attrs([Attr("address","0x6E4"), +Attr("insn","adrp x2, #126976")]), Var("R2",Imm(64)), Int(126976,64)), +Def(Tid(1_065, "%00000429"), Attrs([Attr("address","0x6E8"), +Attr("insn","ldr x2, [x2, #0xff8]")]), Var("R2",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R2",Imm(64)),Int(4088,64)),LittleEndian(),64))]), +Jmps([Goto(Tid(1_070, "%0000042e"), Attrs([Attr("address","0x6EC"), +Attr("insn","cbz x2, #0xc")]), EQ(Var("R2",Imm(64)),Int(0,64)), +Direct(Tid(697, "%000002b9"))), Goto(Tid(1_767, "%000006e7"), Attrs([]), + Int(1,1), Direct(Tid(1_074, "%00000432")))])), Blk(Tid(697, "%000002b9"), + Attrs([Attr("address","0x6F8")]), Phis([]), Defs([]), +Jmps([Call(Tid(705, "%000002c1"), Attrs([Attr("address","0x6F8"), +Attr("insn","ret")]), Int(1,1), (Indirect(Var("R30",Imm(64))),))])), +Blk(Tid(1_074, "%00000432"), Attrs([Attr("address","0x6F0")]), Phis([]), +Defs([Def(Tid(1_078, "%00000436"), Attrs([Attr("address","0x6F0"), +Attr("insn","mov x16, x2")]), Var("R16",Imm(64)), Var("R2",Imm(64)))]), +Jmps([Call(Tid(1_083, "%0000043b"), Attrs([Attr("address","0x6F4"), +Attr("insn","br x16")]), Int(1,1), +(Indirect(Var("R16",Imm(64))),))]))]))]))) \ No newline at end of file diff --git a/examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.bir b/examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.bir new file mode 100644 index 000000000..3b2746f07 --- /dev/null +++ b/examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.bir @@ -0,0 +1,268 @@ +000006ca: program +000006b3: sub __cxa_finalize(__cxa_finalize_result) +000006cb: __cxa_finalize_result :: out u32 = low:32[R0] + +0000041b: +00000523: R16 := 0x1F000 +0000052a: R17 := mem[R16 + 0xFB0, el]:u64 +00000530: R16 := R16 + 0xFB0 +00000535: call R17 with 
noreturn + +000006b4: sub __do_global_dtors_aux(__do_global_dtors_aux_result) +000006cc: __do_global_dtors_aux_result :: out u32 = low:32[R0] + +000002c5: +000002c9: #3 := R31 - 0x20 +000002cf: mem := mem with [#3, el]:u64 <- R29 +000002d5: mem := mem with [#3 + 8, el]:u64 <- R30 +000002d9: R31 := #3 +000002df: R29 := R31 +000002e7: mem := mem with [R31 + 0x10, el]:u64 <- R19 +000002ec: R19 := 0x20000 +000002f3: R0 := pad:64[mem[R19 + 0x10]] +000002fa: when 31:0[R0] <> 0 goto %000002f8 +000006b5: goto %000003e4 + +000003e4: +000003e7: R0 := 0x1F000 +000003ee: R0 := mem[R0 + 0xFE0, el]:u64 +000003f4: when R0 = 0 goto %000003f2 +000006b6: goto %0000040b + +0000040b: +0000040e: R0 := 0x20000 +00000415: R0 := mem[R0 + 8, el]:u64 +0000041a: R30 := 0x730 +0000041d: call @__cxa_finalize with return %000003f2 + +000003f2: +000003fa: R30 := 0x734 +000003fc: call @deregister_tm_clones with return %000003fe + +000003fe: +00000401: R0 := 1 +00000409: mem := mem with [R19 + 0x10] <- 7:0[R0] +000006b7: goto %000002f8 + +000002f8: +00000302: R19 := mem[R31 + 0x10, el]:u64 +00000309: R29 := mem[R31, el]:u64 +0000030e: R30 := mem[R31 + 8, el]:u64 +00000312: R31 := R31 + 0x20 +00000317: call R30 with noreturn + +000006b8: sub __libc_start_main(__libc_start_main_main, __libc_start_main_arg2, __libc_start_main_arg3, __libc_start_main_auxv, __libc_start_main_result) +000006cd: __libc_start_main_main :: in u64 = R0 +000006ce: __libc_start_main_arg2 :: in u32 = low:32[R1] +000006cf: __libc_start_main_arg3 :: in out u64 = R2 +000006d0: __libc_start_main_auxv :: in out u64 = R3 +000006d1: __libc_start_main_result :: out u32 = low:32[R0] + +0000021e: +0000050d: R16 := 0x1F000 +00000514: R17 := mem[R16 + 0xFA8, el]:u64 +0000051a: R16 := R16 + 0xFA8 +0000051f: call R17 with noreturn + +000006b9: sub _fini(_fini_result) +000006d2: _fini_result :: out u32 = low:32[R0] + +00000032: +00000038: #0 := R31 - 0x10 +0000003e: mem := mem with [#0, el]:u64 <- R29 +00000044: mem := mem with [#0 + 8, el]:u64 <- R30 +00000048: R31 := #0 +0000004e: R29 := R31 +00000055: R29 := mem[R31, el]:u64 +0000005a: R30 := mem[R31 + 8, el]:u64 +0000005e: R31 := R31 + 0x10 +00000063: call R30 with noreturn + +000006ba: sub _init(_init_result) +000006d3: _init_result :: out u32 = low:32[R0] + +000005f8: +000005fe: #6 := R31 - 0x10 +00000604: mem := mem with [#6, el]:u64 <- R29 +0000060a: mem := mem with [#6 + 8, el]:u64 <- R30 +0000060e: R31 := #6 +00000614: R29 := R31 +00000619: R30 := 0x5C8 +0000061b: call @call_weak_fn with return %0000061d + +0000061d: +00000622: R29 := mem[R31, el]:u64 +00000627: R30 := mem[R31 + 8, el]:u64 +0000062b: R31 := R31 + 0x10 +00000630: call R30 with noreturn + +000006bb: sub _start(_start_result) +000006d4: _start_result :: out u32 = low:32[R0] + +000001df: +000001e4: R29 := 0 +000001e9: R30 := 0 +000001ef: R5 := R0 +000001f6: R1 := mem[R31, el]:u64 +000001fc: R2 := R31 + 8 +00000202: R6 := R31 +00000207: R0 := 0x1F000 +0000020e: R0 := mem[R0 + 0xFF0, el]:u64 +00000213: R3 := 0 +00000218: R4 := 0 +0000021d: R30 := 0x670 +00000220: call @__libc_start_main with return %00000222 + +00000222: +00000225: R30 := 0x674 +00000228: call @abort with return %000006bc + +000006bc: +000006bd: call @call_weak_fn with noreturn + +000006be: sub abort() + + +00000226: +00000565: R16 := 0x1F000 +0000056c: R17 := mem[R16 + 0xFC8, el]:u64 +00000572: R16 := R16 + 0xFC8 +00000577: call R17 with noreturn + +000006bf: sub call_weak_fn(call_weak_fn_result) +000006d5: call_weak_fn_result :: out u32 = low:32[R0] + +0000022a: +0000022d: R0 
:= 0x1F000 +00000234: R0 := mem[R0 + 0xFE8, el]:u64 +0000023a: when R0 = 0 goto %00000238 +000006c0: goto %0000045b + +00000238: +00000240: call R30 with noreturn + +0000045b: +0000045e: goto @__gmon_start__ + +0000045c: +0000054f: R16 := 0x1F000 +00000556: R17 := mem[R16 + 0xFC0, el]:u64 +0000055c: R16 := R16 + 0xFC0 +00000561: call R17 with noreturn + +000006c1: sub deregister_tm_clones(deregister_tm_clones_result) +000006d6: deregister_tm_clones_result :: out u32 = low:32[R0] + +00000246: +00000249: R0 := 0x20000 +0000024f: R0 := R0 + 0x10 +00000254: R1 := 0x20000 +0000025a: R1 := R1 + 0x10 +00000260: #1 := ~R0 +00000265: #2 := R1 + ~R0 +0000026b: VF := extend:65[#2 + 1] <> extend:65[R1] + extend:65[#1] + 1 +00000271: CF := pad:65[#2 + 1] <> pad:65[R1] + pad:65[#1] + 1 +00000275: ZF := #2 + 1 = 0 +00000279: NF := 63:63[#2 + 1] +0000027f: when ZF goto %0000027d +000006c2: goto %0000043d + +0000043d: +00000440: R1 := 0x1F000 +00000447: R1 := mem[R1 + 0xFD8, el]:u64 +0000044c: when R1 = 0 goto %0000027d +000006c3: goto %00000450 + +0000027d: +00000285: call R30 with noreturn + +00000450: +00000454: R16 := R1 +00000459: call R16 with noreturn + +000006c4: sub frame_dummy(frame_dummy_result) +000006d7: frame_dummy_result :: out u32 = low:32[R0] + +0000031d: +0000031f: call @register_tm_clones with noreturn + +000006c5: sub main(main_argc, main_argv, main_result) +000006d8: main_argc :: in u32 = low:32[R0] +000006d9: main_argv :: in out u64 = R1 +000006da: main_result :: out u32 = low:32[R0] + +00000321: +00000325: #4 := R31 - 0x40 +0000032b: mem := mem with [#4, el]:u64 <- R29 +00000331: mem := mem with [#4 + 8, el]:u64 <- R30 +00000335: R31 := #4 +0000033b: R29 := R31 +00000340: R0 := 0x14 +00000345: R30 := 0x764 +00000348: call @malloc with return %0000034a + +0000034a: +00000350: mem := mem with [R31 + 0x18, el]:u64 <- R0 +00000357: R0 := mem[R31 + 0x18, el]:u64 +0000035c: R1 := 0xC +00000364: mem := mem with [R0, el]:u32 <- 31:0[R1] +0000036b: R0 := mem[R31 + 0x18, el]:u64 +00000373: mem := mem with [R31 + 0x20, el]:u64 <- R0 +0000037a: R0 := mem[R31 + 0x20, el]:u64 +00000380: R0 := R0 + 1 +00000388: mem := mem with [R31 + 0x28, el]:u64 <- R0 +0000038d: R0 := 8 +00000392: R30 := 0x790 +00000394: call @malloc with return %00000396 + +00000396: +0000039c: mem := mem with [R31 + 0x30, el]:u64 <- R0 +000003a3: R0 := mem[R31 + 0x30, el]:u64 +000003aa: R1 := mem[R31 + 0x28, el]:u64 +000003b2: mem := mem with [R0, el]:u64 <- R1 +000003b9: R0 := mem[R31 + 0x30, el]:u64 +000003c0: R0 := mem[R0, el]:u64 +000003c8: mem := mem with [R31 + 0x38, el]:u64 <- R0 +000003cd: R0 := 0 +000003d4: R29 := mem[R31, el]:u64 +000003d9: R30 := mem[R31 + 8, el]:u64 +000003dd: R31 := R31 + 0x40 +000003e2: call R30 with noreturn + +000006c6: sub malloc(malloc_size, malloc_result) +000006db: malloc_size :: in u64 = R0 +000006dc: malloc_result :: out u64 = R0 + +00000346: +00000539: R16 := 0x1F000 +00000540: R17 := mem[R16 + 0xFB8, el]:u64 +00000546: R16 := R16 + 0xFB8 +0000054b: call R17 with noreturn + +000006c7: sub register_tm_clones(register_tm_clones_result) +000006dd: register_tm_clones_result :: out u32 = low:32[R0] + +00000287: +0000028a: R0 := 0x20000 +00000290: R0 := R0 + 0x10 +00000295: R1 := 0x20000 +0000029b: R1 := R1 + 0x10 +000002a2: R1 := R1 + ~R0 + 1 +000002a8: R2 := 0.63:63[R1] +000002af: R1 := R2 + (R1 ~>> 3) +000002b5: R1 := extend:64[63:1[R1]] +000002bb: when R1 = 0 goto %000002b9 +000006c8: goto %0000041f + +0000041f: +00000422: R2 := 0x1F000 +00000429: R2 := mem[R2 + 0xFF8, el]:u64 +0000042e: 
when R2 = 0 goto %000002b9
+000006c9: goto %00000432
+
+000002b9:
+000002c1: call R30 with noreturn
+
+00000432:
+00000436: R16 := R2
+0000043b: call R16 with noreturn
diff --git a/examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.c b/examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.c
new file mode 100644
index 000000000..d3c43c655
--- /dev/null
+++ b/examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.c
@@ -0,0 +1,12 @@
+#include <stdlib.h>
+
+
+int main() {
+ int *bar = malloc(5 * sizeof(int));
+ *bar = 12;
+ void * car = (void*) bar;
+ int* foo = (int*) (car + 1);
+ int ** tar = malloc(sizeof(int*));
+ *tar = foo;
+ int *bat = *tar;
+}
diff --git a/examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.relf b/examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.relf
new file mode 100644
index 000000000..ee608ea91
--- /dev/null
+++ b/examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.relf
@@ -0,0 +1,122 @@
+
+Relocation section '.rela.dyn' at offset 0x480 contains 8 entries:
+ Offset Info Type Symbol's Value Symbol's Name + Addend
+000000000001fd90 0000000000000403 R_AARCH64_RELATIVE 750
+000000000001fd98 0000000000000403 R_AARCH64_RELATIVE 700
+000000000001fff0 0000000000000403 R_AARCH64_RELATIVE 754
+0000000000020008 0000000000000403 R_AARCH64_RELATIVE 20008
+000000000001ffd8 0000000400000401 R_AARCH64_GLOB_DAT 0000000000000000 _ITM_deregisterTMCloneTable + 0
+000000000001ffe0 0000000500000401 R_AARCH64_GLOB_DAT 0000000000000000 __cxa_finalize@GLIBC_2.17 + 0
+000000000001ffe8 0000000700000401 R_AARCH64_GLOB_DAT 0000000000000000 __gmon_start__ + 0
+000000000001fff8 0000000900000401 R_AARCH64_GLOB_DAT 0000000000000000 _ITM_registerTMCloneTable + 0
+
+Relocation section '.rela.plt' at offset 0x540 contains 5 entries:
+ Offset Info Type Symbol's Value Symbol's Name + Addend
+000000000001ffa8 0000000300000402 R_AARCH64_JUMP_SLOT 0000000000000000 __libc_start_main@GLIBC_2.34 + 0
+000000000001ffb0 0000000500000402 R_AARCH64_JUMP_SLOT 0000000000000000 __cxa_finalize@GLIBC_2.17 + 0
+000000000001ffb8 0000000600000402 R_AARCH64_JUMP_SLOT 0000000000000000 malloc@GLIBC_2.17 + 0
+000000000001ffc0 0000000700000402 R_AARCH64_JUMP_SLOT 0000000000000000 __gmon_start__ + 0
+000000000001ffc8 0000000800000402 R_AARCH64_JUMP_SLOT 0000000000000000 abort@GLIBC_2.17 + 0
+
+Symbol table '.dynsym' contains 10 entries:
+ Num: Value Size Type Bind Vis Ndx Name
+ 0: 0000000000000000 0 NOTYPE LOCAL DEFAULT UND
+ 1: 00000000000005b8 0 SECTION LOCAL DEFAULT 11 .init
+ 2: 0000000000020000 0 SECTION LOCAL DEFAULT 22 .data
+ 3: 0000000000000000 0 FUNC GLOBAL DEFAULT UND __libc_start_main@GLIBC_2.34 (2)
+ 4: 0000000000000000 0 NOTYPE WEAK DEFAULT UND _ITM_deregisterTMCloneTable
+ 5: 0000000000000000 0 FUNC WEAK DEFAULT UND __cxa_finalize@GLIBC_2.17 (3)
+ 6: 0000000000000000 0 FUNC GLOBAL DEFAULT UND malloc@GLIBC_2.17 (3)
+ 7: 0000000000000000 0 NOTYPE WEAK DEFAULT UND __gmon_start__
+ 8: 0000000000000000 0 FUNC GLOBAL DEFAULT UND abort@GLIBC_2.17 (3)
+ 9: 0000000000000000 0 NOTYPE WEAK DEFAULT UND _ITM_registerTMCloneTable
+
+Symbol table '.symtab' contains 87 entries:
+ Num: Value Size Type Bind Vis Ndx Name
+ 0: 0000000000000000 0 NOTYPE LOCAL DEFAULT UND
+ 1: 0000000000000238 0 SECTION LOCAL DEFAULT 1 .interp
+ 2: 0000000000000254 0 SECTION LOCAL DEFAULT 2 .note.gnu.build-id
+ 3: 0000000000000278 0 SECTION LOCAL DEFAULT 3 .note.ABI-tag
+ 4: 0000000000000298 0 SECTION LOCAL DEFAULT 4 .gnu.hash
+ 5: 00000000000002b8 0 SECTION LOCAL DEFAULT 5 .dynsym
+ 6: 00000000000003a8 0
SECTION LOCAL DEFAULT 6 .dynstr + 7: 000000000000043c 0 SECTION LOCAL DEFAULT 7 .gnu.version + 8: 0000000000000450 0 SECTION LOCAL DEFAULT 8 .gnu.version_r + 9: 0000000000000480 0 SECTION LOCAL DEFAULT 9 .rela.dyn + 10: 0000000000000540 0 SECTION LOCAL DEFAULT 10 .rela.plt + 11: 00000000000005b8 0 SECTION LOCAL DEFAULT 11 .init + 12: 00000000000005d0 0 SECTION LOCAL DEFAULT 12 .plt + 13: 0000000000000640 0 SECTION LOCAL DEFAULT 13 .text + 14: 00000000000007b8 0 SECTION LOCAL DEFAULT 14 .fini + 15: 00000000000007cc 0 SECTION LOCAL DEFAULT 15 .rodata + 16: 00000000000007d0 0 SECTION LOCAL DEFAULT 16 .eh_frame_hdr + 17: 0000000000000810 0 SECTION LOCAL DEFAULT 17 .eh_frame + 18: 000000000001fd90 0 SECTION LOCAL DEFAULT 18 .init_array + 19: 000000000001fd98 0 SECTION LOCAL DEFAULT 19 .fini_array + 20: 000000000001fda0 0 SECTION LOCAL DEFAULT 20 .dynamic + 21: 000000000001ff90 0 SECTION LOCAL DEFAULT 21 .got + 22: 0000000000020000 0 SECTION LOCAL DEFAULT 22 .data + 23: 0000000000020010 0 SECTION LOCAL DEFAULT 23 .bss + 24: 0000000000000000 0 SECTION LOCAL DEFAULT 24 .comment + 25: 0000000000000000 0 FILE LOCAL DEFAULT ABS Scrt1.o + 26: 0000000000000278 0 NOTYPE LOCAL DEFAULT 3 $d + 27: 0000000000000278 32 OBJECT LOCAL DEFAULT 3 __abi_tag + 28: 0000000000000640 0 NOTYPE LOCAL DEFAULT 13 $x + 29: 0000000000000824 0 NOTYPE LOCAL DEFAULT 17 $d + 30: 00000000000007cc 0 NOTYPE LOCAL DEFAULT 15 $d + 31: 0000000000000000 0 FILE LOCAL DEFAULT ABS crti.o + 32: 0000000000000674 0 NOTYPE LOCAL DEFAULT 13 $x + 33: 0000000000000674 20 FUNC LOCAL DEFAULT 13 call_weak_fn + 34: 00000000000005b8 0 NOTYPE LOCAL DEFAULT 11 $x + 35: 00000000000007b8 0 NOTYPE LOCAL DEFAULT 14 $x + 36: 0000000000000000 0 FILE LOCAL DEFAULT ABS crtn.o + 37: 00000000000005c8 0 NOTYPE LOCAL DEFAULT 11 $x + 38: 00000000000007c4 0 NOTYPE LOCAL DEFAULT 14 $x + 39: 0000000000000000 0 FILE LOCAL DEFAULT ABS crtstuff.c + 40: 0000000000000690 0 NOTYPE LOCAL DEFAULT 13 $x + 41: 0000000000000690 0 FUNC LOCAL DEFAULT 13 deregister_tm_clones + 42: 00000000000006c0 0 FUNC LOCAL DEFAULT 13 register_tm_clones + 43: 0000000000020008 0 NOTYPE LOCAL DEFAULT 22 $d + 44: 0000000000000700 0 FUNC LOCAL DEFAULT 13 __do_global_dtors_aux + 45: 0000000000020010 1 OBJECT LOCAL DEFAULT 23 completed.0 + 46: 000000000001fd98 0 NOTYPE LOCAL DEFAULT 19 $d + 47: 000000000001fd98 0 OBJECT LOCAL DEFAULT 19 __do_global_dtors_aux_fini_array_entry + 48: 0000000000000750 0 FUNC LOCAL DEFAULT 13 frame_dummy + 49: 000000000001fd90 0 NOTYPE LOCAL DEFAULT 18 $d + 50: 000000000001fd90 0 OBJECT LOCAL DEFAULT 18 __frame_dummy_init_array_entry + 51: 0000000000000838 0 NOTYPE LOCAL DEFAULT 17 $d + 52: 0000000000020010 0 NOTYPE LOCAL DEFAULT 23 $d + 53: 0000000000000000 0 FILE LOCAL DEFAULT ABS unsafe_pointer_arithmetic.c + 54: 0000000000000754 0 NOTYPE LOCAL DEFAULT 13 $x + 55: 0000000000000898 0 NOTYPE LOCAL DEFAULT 17 $d + 56: 0000000000000000 0 FILE LOCAL DEFAULT ABS crtstuff.c + 57: 00000000000008b8 0 NOTYPE LOCAL DEFAULT 17 $d + 58: 00000000000008b8 0 OBJECT LOCAL DEFAULT 17 __FRAME_END__ + 59: 0000000000000000 0 FILE LOCAL DEFAULT ABS + 60: 000000000001fda0 0 OBJECT LOCAL DEFAULT ABS _DYNAMIC + 61: 00000000000007d0 0 NOTYPE LOCAL DEFAULT 16 __GNU_EH_FRAME_HDR + 62: 000000000001ffd0 0 OBJECT LOCAL DEFAULT ABS _GLOBAL_OFFSET_TABLE_ + 63: 00000000000005d0 0 NOTYPE LOCAL DEFAULT 12 $x + 64: 0000000000000000 0 FUNC GLOBAL DEFAULT UND __libc_start_main@GLIBC_2.34 + 65: 0000000000000000 0 NOTYPE WEAK DEFAULT UND _ITM_deregisterTMCloneTable + 66: 0000000000020000 0 NOTYPE WEAK DEFAULT 
22 data_start + 67: 0000000000020010 0 NOTYPE GLOBAL DEFAULT 23 __bss_start__ + 68: 0000000000000000 0 FUNC WEAK DEFAULT UND __cxa_finalize@GLIBC_2.17 + 69: 0000000000020018 0 NOTYPE GLOBAL DEFAULT 23 _bss_end__ + 70: 0000000000020010 0 NOTYPE GLOBAL DEFAULT 22 _edata + 71: 00000000000007b8 0 FUNC GLOBAL HIDDEN 14 _fini + 72: 0000000000020018 0 NOTYPE GLOBAL DEFAULT 23 __bss_end__ + 73: 0000000000000000 0 FUNC GLOBAL DEFAULT UND malloc@GLIBC_2.17 + 74: 0000000000020000 0 NOTYPE GLOBAL DEFAULT 22 __data_start + 75: 0000000000000000 0 NOTYPE WEAK DEFAULT UND __gmon_start__ + 76: 0000000000020008 0 OBJECT GLOBAL HIDDEN 22 __dso_handle + 77: 0000000000000000 0 FUNC GLOBAL DEFAULT UND abort@GLIBC_2.17 + 78: 00000000000007cc 4 OBJECT GLOBAL DEFAULT 15 _IO_stdin_used + 79: 0000000000020018 0 NOTYPE GLOBAL DEFAULT 23 _end + 80: 0000000000000640 52 FUNC GLOBAL DEFAULT 13 _start + 81: 0000000000020018 0 NOTYPE GLOBAL DEFAULT 23 __end__ + 82: 0000000000020010 0 NOTYPE GLOBAL DEFAULT 23 __bss_start + 83: 0000000000000754 100 FUNC GLOBAL DEFAULT 13 main + 84: 0000000000020010 0 OBJECT GLOBAL HIDDEN 22 __TMC_END__ + 85: 0000000000000000 0 NOTYPE WEAK DEFAULT UND _ITM_registerTMCloneTable + 86: 00000000000005b8 0 FUNC GLOBAL HIDDEN 11 _init diff --git a/src/main/scala/analysis/DSA.scala b/src/main/scala/analysis/DSA.scala index 3814366cf..281a38c38 100644 --- a/src/main/scala/analysis/DSA.scala +++ b/src/main/scala/analysis/DSA.scala @@ -1,6 +1,6 @@ package analysis -import ir.{BitVecLiteral, BitVecType, CFGPosition, CallGraph, Procedure, Program, Register, Variable, computeDomain, end} +import ir.{begin, BitVecLiteral, BitVecType, CFGPosition, CallGraph, Procedure, Program, Register, Variable, computeDomain, end} import specification.{ExternalFunction, SpecGlobal} import scala.collection.mutable @@ -13,10 +13,11 @@ class DSA(program: Program, reachingDefs: Map[CFGPosition, Map[Variable, Set[CFGPosition]]], writesTo: Map[Procedure, Set[Register]], params: Map[Procedure, Set[Variable]] - ) extends Analysis[Any] { + ) extends Analysis[Map[Procedure, DSG]] { val locals : mutable.Map[Procedure, DSG] = mutable.Map() val bu: mutable.Map[Procedure, DSG] = mutable.Map() + val td: mutable.Map[Procedure, DSG] = mutable.Map() val stackPointer = Register("R31", BitVecType(64)) val returnPointer = Register("R30", BitVecType(64)) @@ -32,18 +33,11 @@ class DSA(program: Program, (s, proc) => s ++ findLeaf(proc) } - def getCells(pos: CFGPosition, arg: Variable, graph: DSG): Set[(DSC, BigInt)] = - if reachingDefs(pos).contains(arg) then - reachingDefs(pos)(arg).foldLeft(Set[(DSC, BigInt)]()) { - (s, defintion) => - s + graph.varToCell(defintion)(arg) - } - else - Set(graph.formals(arg)) var visited = Set[Procedure]() val queue = mutable.Queue[Procedure]() - override def analyze(): Any = { + + override def analyze(): Map[Procedure, DSG] = { val domain = computeDomain(CallGraph, Set(program.mainProcedure)) domain.foreach( proc => @@ -58,17 +52,20 @@ class DSA(program: Program, proc => assert(locals(proc).callsites.isEmpty) visited += proc - val preds = CallGraph.pred(proc) queue.enqueueAll(CallGraph.pred(proc).diff(visited)) +// CallGraph.pred(proc).foreach(buildBUQueue) ) while queue.nonEmpty do val proc = queue.dequeue() + visited += proc + queue.enqueueAll(CallGraph.pred(proc).diff(visited)) val buGraph = bu(proc) + // it should be fine buGraph.callsites.foreach( // clone all the nodes first callSite => val callee = callSite.proc - val calleeGraph = locals(callee).cloneSelf() + val calleeGraph = locals(callee) 
//.cloneSelf() assert(calleeGraph.formals.keySet.diff(ignoreRegisters).equals(callSite.paramCells.keySet)) calleeGraph.formals.foreach{ case (variable: Variable, (cell: DSC, internalOffset: BigInt)) if !ignoreRegisters.contains(variable) => @@ -90,29 +87,120 @@ class DSA(program: Program, node.cloneNode(calleeGraph, buGraph) } ) - ) - buGraph.callsites.foreach(//unify nodes - callSite => - val callee = callSite.proc - val calleeGraph = locals(callee).cloneSelf() - calleeGraph.formals.foreach{ - case (variable: Variable, (cell: DSC, internalOffset: BigInt)) if !ignoreRegisters.contains(variable) => - buGraph.mergeCells(cell, callSite.paramCells(variable)) + + // TODO +// assert(calleeGraph.formals.isEmpty || buGraph.varToCell(begin(callee)).equals(calleeGraph.formals)) + buGraph.varToCell.getOrElse(begin(callee), Map.empty).foreach{ + case (variable: Variable, formal) if !ignoreRegisters.contains(variable) => + buGraph.mergeCells(adjust(formal), adjust(callSite.paramCells(variable))) case _ => } writesTo(callee).foreach( reg => - val returnCells = calleeGraph.getCells(end(callee), reg) -// assert(returnCells.nonEmpty) - returnCells.foldLeft(callSite.returnCells(reg)){ - case (c: DSC, (cell: DSC, internalOffset: BigInt)) => - buGraph.mergeCells(c, cell) + val returnCells = buGraph.getCells(end(callee), reg) + // assert(returnCells.nonEmpty) + val result: DSC = returnCells.foldLeft(adjust(callSite.returnCells(reg))){ + // + case (c: DSC, ret) => + buGraph.mergeCells(c, adjust(ret)) + } + + returnCells.foreach{ + case (cell: DSC, offset: BigInt) => + calleeGraph.replace(cell, result, 0) + } + ) + ) +// buGraph.callsites.foreach(//unify nodes +// callSite => +// val callee = callSite.proc +// val calleeGraph = locals(callee) //.cloneSelf() +// calleeGraph.formals.foreach{ +// case (variable: Variable, formal) if !ignoreRegisters.contains(variable) => +// // TODO merges here should update the node in the callee +// buGraph.mergeCells(adjust(formal), adjust(callSite.paramCells(variable))) +// case _ => +// } +// writesTo(callee).foreach( +// reg => +// val returnCells = calleeGraph.getCells(end(callee), reg) +//// assert(returnCells.nonEmpty) +// returnCells.foldLeft(adjust(callSite.returnCells(reg))){ +// // +// case (c: DSC, ret) => +// buGraph.mergeCells(c, adjust(ret)) +// } +// ) +// ) + // bottom up phase finished + // clone bu graphs to top-down graphs + domain.foreach( + proc => + td.update(proc, bu(proc).cloneSelf()) + ) + + queue.enqueue(program.mainProcedure) + visited = Set() + while queue.nonEmpty do + val proc = queue.dequeue() + visited += proc + queue.enqueueAll(CallGraph.succ(proc).diff(visited)) + val callersGraph = td(proc) + callersGraph.callsites.foreach( + callSite => + val callee = callSite.proc + val calleesGraph = td(callee) + callSite.paramCells.foreach{ + case (variable: Variable, (cell: DSC, internalOffset: BigInt)) => + val node = cell.node.get + node.cloneNode(callersGraph, calleesGraph) + } + + callSite.returnCells.foreach{ + case (variable: Variable, (cell: DSC, internalOffset: BigInt)) => + val node = cell.node.get + node.cloneNode(callersGraph, callersGraph) + } + + callSite.paramCells.keySet.foreach( + variable => + val paramCells = calleesGraph.getCells(callSite.call, variable) + paramCells.foldLeft(adjust(calleesGraph.formals(variable))) { + (cell, slice) => + calleesGraph.mergeCells(adjust(slice), cell) } ) + calleesGraph.varToCell.getOrElse(callSite.call, Map.empty).foreach{ + case (variable: Variable, cell: (DSC, BigInt)) => + val returnCells = 
calleesGraph.getCells(end(callee), variable) + returnCells.foldLeft(adjust(cell)){ + case (c: DSC, retCell: (DSC, BigInt)) => + calleesGraph.mergeCells(c, adjust(retCell)) + } + } ) +// callersGraph.callsites.foreach( +// callSite => +// val callee = callSite.proc +// val calleesGraph = td(callee) +// callSite.paramCells.foreach { +// case (variable: Variable, cell) => +// calleesGraph.mergeCells(adjust(cell), adjust(calleesGraph.formals(variable))) +// } +// +// callSite.returnCells.foreach { +// case (variable: Variable, cell: (DSC, BigInt)) => +// val returnCells = calleesGraph.getCells(end(callee), variable) +// returnCells.foldLeft(adjust(cell)){ +// case (c: DSC, retCell: (DSC, BigInt)) => +// calleesGraph.mergeCells(c, adjust(retCell)) +// } +// } +// ) + + td.toMap - println(bu) } } diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index 0986574ea..c22fe4033 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -1,6 +1,6 @@ package analysis -import ir.{BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, IntraProcIRCursor, Literal, LocalAssign, Memory, MemoryAssign, MemoryLoad, MemoryStore, Procedure, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, computeDomain, toShortString} +import ir.{begin, BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, IntraProcIRCursor, Literal, LocalAssign, Memory, MemoryAssign, MemoryLoad, MemoryStore, Procedure, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, computeDomain, toShortString} import specification.{ExternalFunction, SpecGlobal} import scala.util.control.Breaks.{break, breakable} @@ -30,7 +30,7 @@ class DSG(val proc: Procedure, ) { // DSNodes owned by this graph val nodes: mutable.Set[DSN] = mutable.Set() - val pointTo: mutable.Map[DSC, DSC] = mutable.Map() + val pointTo: mutable.Map[DSC, (DSC, BigInt)] = mutable.Map() val callsites: mutable.Set[CallSite] = mutable.Set() val mallocRegister = Register("R0", BitVecType(64)) @@ -136,22 +136,25 @@ class DSG(val proc: Procedure, (pos, m) => m.foreach { case (variable, (cell, offset)) => - if cell.equals(oldCell) then - m.update(variable, (newCell, offset + internalOffsetChange)) + if cell.equals(oldCell) then +// if cell.node.equals(oldCell.node) && cell.offset + offset == oldCell.offset then + m.update(variable, (newCell, offset + internalOffsetChange)) } ) formals.foreach{ case (variable, (cell, offset)) => if cell.equals(oldCell) then +// if cell.node.equals(oldCell.node) && cell.offset + offset == oldCell.offset then formals.update(variable, (newCell, offset + internalOffsetChange)) } - private def replaceInPointTo(oldCell: DSC, newCell: DSC) = + private def replaceInPointTo(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = pointTo.foreach { - case (pointer, pointee) => - if pointee.equals(oldCell) then - pointTo.update(pointer, newCell) + case (pointer, (cell: DSC, pointeeInternal: BigInt)) => + if cell.equals(oldCell) then +// if cell.node.equals(oldCell.node) && cell.offset + pointeeInternal == oldCell.offset then + pointTo.update(pointer, (newCell, pointeeInternal + internalOffsetChange)) } private def replaceInGlobals(oldCell: DSC, newCell: DSC) = @@ -172,35 +175,41 @@ class DSG(val proc: Procedure, stackMapping.update(offset, newCell.node.get) } - private def replaceInCallSites(oldCell: DSC, newCell: DSC) = + private def replaceInCallSites(oldCell: DSC, newCell: DSC, 
internalOffsetChange: BigInt) = callsites.foreach( callSite => callSite.returnCells.foreach{ - case (variable: Variable, cell: DSC) => + case (variable: Variable, (cell: DSC, internal: BigInt)) => if cell.equals(oldCell) then - callSite.returnCells.update(variable, newCell) +// if cell.node.equals(oldCell.node) && cell.offset + internal == oldCell.offset then + callSite.returnCells.update(variable, (newCell, internal + internalOffsetChange)) } callSite.paramCells.foreach{ - case (variable: Variable, cell: DSC) => + case (variable: Variable, (cell: DSC, internal: BigInt)) => if cell.equals(oldCell) then - callSite.paramCells.update(variable, newCell) +// if cell.node.equals(oldCell.node) && cell.offset + internal == oldCell.offset then + callSite.paramCells.update(variable, (newCell, internal + internalOffsetChange)) } ) - private def replace(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = + def replace(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = replaceInEV(oldCell, newCell, internalOffsetChange) - replaceInPointTo(oldCell, newCell) + replaceInPointTo(oldCell, newCell, internalOffsetChange) replaceInGlobals(oldCell, newCell) replaceInStack(oldCell, newCell) - replaceInCallSites(oldCell, newCell) + replaceInCallSites(oldCell, newCell, internalOffsetChange) - def getPointee(cell: DSC): DSC = + def getPointee(cell: DSC): (DSC, BigInt) = if !pointTo.contains(cell) then val node = DSN(Some(this)) - pointTo.update(cell, node.cells(0)) + pointTo.update(cell, (node.cells(0), 0)) pointTo(cell) + def getPointeeAdjusted(cell:DSC): DSC = + val pointee = getPointee(cell) + adjust(pointee) + def getCells(pos: CFGPosition, arg: Variable): Set[(DSC, BigInt)] = if reachingDefs(pos).contains(arg) then reachingDefs(pos)(arg).foldLeft(Set[(DSC, BigInt)]()) { @@ -214,14 +223,18 @@ class DSG(val proc: Procedure, val collapedCell = DSC(Option(node), 0) val e = DSC(None, 0) + var pointeeInternalOffset: BigInt = 0 val cell = node.cells.foldLeft(e) { (c, field) => if pointTo.contains(field._2) && pointTo(field._2) == field._2 then - pointTo.update(field._2, collapedCell) + pointTo.update(field._2, (collapedCell, 0)) c else if pointTo.contains(field._2) then - mergeCells(c, getPointee(field._2)) + val (pointeeCell, internalOffset) = getPointee(field._2) + if internalOffset > pointeeInternalOffset then + pointeeInternalOffset = internalOffset + mergeCells(c, getPointeeAdjusted(field._2)) else c } @@ -243,7 +256,7 @@ class DSG(val proc: Procedure, node.cells.clear() node.cells.addOne(0, collapedCell) if cell.node.isDefined then - pointTo.update(node.cells(0), cell) + pointTo.update(node.cells(0), (cell, pointeeInternalOffset)) def optionalCollapse(node: DSN): Unit = { var lastOffset: BigInt = -1 @@ -266,7 +279,12 @@ class DSG(val proc: Procedure, require(cell1.node.equals(cell2.node) && cell1.offset < cell2.offset) if pointTo.contains(cell2) then if pointTo.contains(cell1) then - mergeCells(getPointee(cell1), getPointee(cell2)) + val (cell1Pointee: DSC, pointee1Internal: BigInt) = getPointee(cell1) + val (cell2Pointee: DSC, pointee2Internal: BigInt) = getPointee(cell2) + val result = mergeCells(getPointeeAdjusted(cell1), getPointeeAdjusted(cell2)) + assert(pointTo(cell1)._1.equals(result)) + // TODO + pointTo.update(cell1, (result,pointee2Internal.max(pointee1Internal))) else pointTo.update(cell1, getPointee(cell2)) pointTo.remove(cell2) @@ -295,18 +313,21 @@ class DSG(val proc: Procedure, node2.flags.join(node1.flags) if pointTo.contains(node1.cells(0)) then if 
pointTo.contains(node2.cells(0)) then - pointTo.update(node2.cells(0), mergeCells(getPointee(node1.cells(0)), getPointee(node2.cells(0)))) + val (pointee1: DSC, internal1: BigInt) = getPointee(node1.cells(0)) + val (pointee2: DSC, internal2: BigInt) = getPointee(node2.cells(0)) + val result = mergeCells(getPointeeAdjusted(node1.cells(0)), getPointeeAdjusted(node2.cells(0))) + pointTo.update(node2.cells(0), (result, internal1.max(internal2))) else pointTo.update(node2.cells(0), getPointee(node1.cells(0))) pointTo.remove(node1.cells(0)) replace(node1.cells(0), node2.cells(0), 0) node2.cells(0) - else if cell1.node.get.allocationRegions.isEmpty && cell1.offset == 0 && cell1.node.get.cells.size == 1 && cell1.largestAccessedSize == 0 && // - !pointTo.contains(cell1) && pointTo.values.foldLeft(true) { - (condition, cell) => cell != cell1 && condition - } then - replace(cell1, cell2, 0) - cell2 +// else if cell1.node.get.allocationRegions.isEmpty && cell1.offset == 0 && cell1.node.get.cells.size == 1 && cell1.largestAccessedSize == 0 && // +// !pointTo.contains(cell1) && pointTo.values.foldLeft(true) { +// (condition, cell) => cell != cell1 && condition +// } then +// replace(cell1, cell2, 0) +// cell2 else var delta = cell1.offset - cell2.offset @@ -343,7 +364,7 @@ class DSG(val proc: Procedure, if (lastOffset + lastAccess > offset) || lastOffset == offset then // includes this cell if (offset - lastOffset) + cell.largestAccessedSize > lastAccess then lastAccess = (offset - lastOffset) + cell.largestAccessedSize - resultCells.update(offset, (resultCells(offset)._1 + cell, lastAccess)) + resultCells.update(lastOffset, (resultCells(lastOffset)._1 + cell, lastAccess)) else lastOffset = offset lastAccess = cell.largestAccessedSize @@ -353,7 +374,7 @@ class DSG(val proc: Procedure, resultCells.foreach { case (offset: BigInt, (cells: Set[DSC], largestAccess: BigInt)) => val collapsedCell = resultNode.addCell(offset, largestAccess) - val outgoing: Set[DSC] = cells.foldLeft(Set()){ + val outgoing: Set[(DSC, BigInt)] = cells.foldLeft(Set[(DSC, BigInt)]()){ (set, cell) => // replace incoming edges if cell.node.get.equals(node2) then @@ -374,17 +395,21 @@ class DSG(val proc: Procedure, if outgoing.size == 1 then pointTo.update(collapsedCell, outgoing.head) else if outgoing.size > 1 then - val result = outgoing.tail.foldLeft(outgoing.head){ - (result, cell) => + var internal = outgoing.head._2 + val result = outgoing.tail.foldLeft(outgoing.head._1){ + (result, pointee) => + val cell = pointee._1 + val pointeeInternal = pointee._2 + internal = internal.max(pointeeInternal) mergeCells(result, cell) } - pointTo.update(collapsedCell, result) + pointTo.update(collapsedCell, (result, internal)) } if cell1.offset >= cell2.offset then - resultNode.cells(cell1.offset) + resultNode.getCell(cell1.offset) else - resultNode.cells(cell2.offset) + resultNode.getCell(cell2.offset) private def isFormal(pos: CFGPosition, variable: Variable): Boolean = @@ -392,7 +417,7 @@ class DSG(val proc: Procedure, val formals: mutable.Map[Variable, (DSC, BigInt)] = mutable.Map() - val varToCell: Map[CFGPosition, mutable.Map[Variable, (DSC, BigInt)]] = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).foldLeft(Map[CFGPosition, mutable.Map[Variable, (DSC, BigInt)]]()) { + val varToCell: mutable.Map[CFGPosition, mutable.Map[Variable, (DSC, BigInt)]] = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).foldLeft(mutable.Map[CFGPosition, mutable.Map[Variable, (DSC, BigInt)]]()) { (m, pos) => pos 
match case LocalAssign(variable, value , label) => @@ -438,7 +463,7 @@ class DSG(val proc: Procedure, val idToNode: mutable.Map[Int, DSN] = mutable.Map() formals.foreach{ case (variable: Variable, (cell: DSC, internalOffset: BigInt)) => - assert(newGraph.formals.contains(variable)) +// assert(newGraph.formals.contains(variable)) val node = cell.node.get if !idToNode.contains(node.id) then val newNode = node.cloneSelf(newGraph) @@ -448,10 +473,12 @@ class DSG(val proc: Procedure, varToCell.foreach { case (position: CFGPosition, values: mutable.Map[Variable, (DSC, BigInt)]) => - assert(newGraph.varToCell.contains(position)) +// assert(newGraph.varToCell.contains(position)) + if !newGraph.varToCell.contains(position) then + newGraph.varToCell.update(position, mutable.Map[Variable, (DSC, BigInt)]()) values.foreach{ case (variable: Variable, (cell: DSC, internalOffset: BigInt)) => - assert(newGraph.varToCell(position).contains(variable)) +// assert(newGraph.varToCell(position).contains(variable)) val node = cell.node.get if !idToNode.contains(node.id) then val newNode = node.cloneSelf(newGraph) @@ -478,26 +505,43 @@ class DSG(val proc: Procedure, newGraph.globalMapping.update((start, end), (idToNode(node.id), internalOffset)) } + pointTo.foreach { + case (cell1: DSC, (cell2: DSC, internalOffset: BigInt)) => + val node1 = cell1.node.get + val node2 = cell2.node.get + if !idToNode.contains(node1.id) then + val newNode1 = node1.cloneSelf(newGraph) + idToNode.update(node1.id, newNode1) + + if !idToNode.contains(node2.id) then + val newNode2 = node2.cloneSelf(newGraph) + idToNode.update(node2.id, newNode2) + + newGraph.pointTo.update(idToNode(node1.id).cells(cell1.offset), (idToNode(node2.id).cells(cell2.offset), internalOffset)) + } + callsites.foreach( callSite => val cs = CallSite(callSite.call, newGraph) newGraph.callsites.add(cs) assert(cs.paramCells.keySet.equals(callSite.paramCells.keySet)) callSite.paramCells.foreach{ - case (variable: Variable, cell: DSC) => + case (variable: Variable, (cell: DSC, internal: BigInt)) => assert(cs.paramCells.contains(variable)) val id = cell.node.get.id - cs.paramCells.update(variable, idToNode(id).cells(cell.offset)) + cs.paramCells.update(variable, (idToNode(id).cells(cell.offset), internal)) } callSite.returnCells.foreach{ - case (variable: Variable, cell: DSC) => + case (variable: Variable, (cell: DSC, internal: BigInt)) => assert(cs.returnCells.contains(variable)) val id = cell.node.get.id - cs.returnCells.update(variable, idToNode(id).cells(cell.offset)) + cs.returnCells.update(variable, (idToNode(id).cells(cell.offset), internal)) } ) + + newGraph.nodes.addAll(idToNode.values) newGraph @@ -594,14 +638,40 @@ class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCount node def cloneNode(from: DSG, to: DSG): Unit = - assert(from.equals(graph.get)) +// assert(from.nodes.contains(this)) TODO update nodes after each phase for to check this assertion if !to.nodes.contains(this) then to.nodes.add(this) + + + from.varToCell.foreach( + t => + val pos = t._1 + val varMap = t._2 + varMap.foreach{ + case (variable: Variable, (cell: DSC, internal: BigInt)) => + if cell.node.get.equals(this) then + to.varToCell.update( + pos, + to.varToCell.getOrElseUpdate(pos, + mutable.Map[Variable, (DSC, BigInt)]()) ++ Map(variable -> (cell, internal)) + ) + } + ) + from.formals.foreach{ + case (variable: Variable, (cell: DSC, internal: BigInt)) => + if cell.node.get.equals(this) then + to.varToCell.update( + begin(from.proc), + 
to.varToCell.getOrElseUpdate(begin(from.proc), + mutable.Map[Variable, (DSC, BigInt)]()) ++ Map(variable -> (cell, internal)) + ) + } + cells.foreach { case (offset: BigInt, cell: DSC) => if from.pointTo.contains(cell) then val pointee = from.getPointee(cell) - pointee.node.get.cloneNode(to,from) + pointee._1.node.get.cloneNode(from, to) to.pointTo.update(cell, pointee) } @@ -631,17 +701,17 @@ case class DSC(node: Option[DSN], offset: BigInt) class CallSite(val call: DirectCall, val graph: DSG) { val proc = call.target - val paramCells: mutable.Map[Variable, DSC] = graph.params(proc).foldLeft(mutable.Map[Variable, DSC]()) { + val paramCells: mutable.Map[Variable, (DSC, BigInt)] = graph.params(proc).foldLeft(mutable.Map[Variable, (DSC, BigInt)]()) { (m, reg) => val node = DSN(Some(graph)) node.flags.incomplete = true - m += (reg -> node.cells(0)) + m += (reg -> (node.cells(0), 0)) } - val returnCells: mutable.Map[Variable, DSC] = graph.writesTo(proc).foldLeft(mutable.Map[Variable, DSC]()) { + val returnCells: mutable.Map[Variable, (DSC, BigInt)] = graph.writesTo(proc).foldLeft(mutable.Map[Variable, (DSC, BigInt)]()) { (m, reg) => val node = DSN(Some(graph)) node.flags.incomplete = true - m += (reg -> node.cells(0)) + m += (reg -> (node.cells(0), 0)) } } @@ -677,6 +747,15 @@ def twosComplementToDec(binary: Array[Int]): BigInt = { result } +def adjust(cell: DSC, internalOffset: BigInt): DSC = + val node = cell.node.get + node.addCell(cell.offset+internalOffset, 0) + +def adjust(tuple: (DSC, BigInt)): DSC = + val cell = tuple._1 + val internal = tuple._2 + adjust(cell, internal) + val BITVECNEGATIVE: BigInt = new BigInt(new BigInteger("9223372036854775808")) diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala index f4ed35139..c09d650ed 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/Local.scala @@ -126,11 +126,16 @@ class Local( node.addCell(field, size) // add cell there if doesn't already exists if node.collapsed then field = 0 - graph.mergeCells(c, if pointee then graph.getPointee(node.getCell(field)) else node.getCell(field)) + graph.mergeCells(c, + if pointee then + graph.getPointeeAdjusted(node.getCell(field)) + else + node.getCell(field) + ) else val node = cell.node.get graph.collapseNode(node) - graph.mergeCells(c, if pointee then graph.getPointee(node.cells(0)) else node.cells(0)) + graph.mergeCells(c, if pointee then graph.getPointeeAdjusted(node.cells(0)) else node.cells(0)) } if pointee then @@ -140,10 +145,10 @@ class Local( val internalOffset = t._2 val node = t._1.node.get val cell = node.getCell(offset + internalOffset) - if graph.pointTo.contains(cell) && graph.pointTo(cell).equals(result) then + if graph.pointTo.contains(cell) && graph.pointTo(cell)._1.equals(result) then graph.optionalCollapse(node) assert(graph.pointTo.contains(node.getCell(offset))) - result = graph.getPointee(node.getCell(offset)) + result = graph.getPointee(node.getCell(offset))._1 else graph.optionalCollapse(node) ) @@ -171,18 +176,25 @@ class Local( val cs = CallSite(call, graph) graph.callsites.add(cs) cs.paramCells.foreach{ - case (variable: Variable, cell: DSC) => - visitPointerArithmeticOperation(call, cell, variable, 0) + case (variable: Variable, (cell: DSC, internal: BigInt)) => + // TODO assert(false) + val node = cell.node.get + val adjusted = node.addCell(cell.offset + internal, 0) + visitPointerArithmeticOperation(call, adjusted, variable, 0) } cs.returnCells.foreach{ - case (variable: Variable, cell: DSC) => + case 
(variable: Variable, (cell: DSC, internal: BigInt)) => val returnArgument = graph.varToCell(n)(variable)._1 - graph.mergeCells(returnArgument, cell) + val returnArgumenetInternal = graph.varToCell(n)(variable)._2 + val returnArgumentNode = returnArgument.node.get + val adjustedReturnArgument = returnArgumentNode.addCell(returnArgument.offset + returnArgumenetInternal, 0) + val node = cell.node.get + val adjustedCell = node.addCell(cell.offset + internal, 0) + graph.mergeCells(adjustedReturnArgument, adjustedCell) } - print("") case LocalAssign(variable, rhs, maybeString) => val expr: Expr = unwrapPaddingAndSlicing(rhs) - val lhsCell = graph.varToCell(n)(variable)._1 + val lhsCell = adjust(graph.varToCell(n)(variable)) if isGlobal(expr, n).isDefined then val global = isGlobal(expr, n).get graph.mergeCells(lhsCell, global) @@ -211,10 +223,10 @@ class Local( lhsCell.node.get.flags.read = true if isGlobal(index, n, byteSize).isDefined then val global = isGlobal(index, n, byteSize).get - graph.mergeCells(lhsCell, graph.getPointee(global)) + graph.mergeCells(lhsCell, graph.getPointeeAdjusted(global)) else if isStack(index, n).isDefined then val stack = isStack(index, n).get - graph.mergeCells(lhsCell, graph.getPointee(stack)) + graph.mergeCells(lhsCell, graph.getPointeeAdjusted(stack)) else index match case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => @@ -253,12 +265,13 @@ class Local( case MemoryAssign(memory, MemoryStore(mem, index, expr: Expr, endian, size), label) if unwrapPaddingAndSlicing(expr).isInstanceOf[Variable] => // if value is a literal ignore it val value: Variable = unwrapPaddingAndSlicing(expr).asInstanceOf[Variable] + reachingDefs(n)(value).foreach(visit) val byteSize = (size.toDouble/8).ceil.toInt val addressPointee: DSC = if isGlobal(index, n, byteSize).isDefined then - graph.getPointee(isGlobal(index, n, byteSize).get) + graph.getPointeeAdjusted(isGlobal(index, n, byteSize).get) else if isStack(index, n).isDefined then - graph.getPointee(isStack(index, n).get) + graph.getPointeeAdjusted(isStack(index, n).get) else index match case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => @@ -273,14 +286,14 @@ class Local( addressPointee.node.get.flags.modified = true val valueCells = graph.getCells(n, value) val result = valueCells.foldLeft(addressPointee) { - (c, p) => - graph.mergeCells(p._1, c) + (c, slice) => + graph.mergeCells(adjust(slice), c) } case _ => } def analyze(): DSG = - val domain = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString) + val domain = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).reverse domain.foreach(visit) diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index d567a2fc2..9c2562514 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -61,7 +61,9 @@ case class StaticAnalysisContext( memoryRegionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], symbolicAccessess: Map[CFGPosition, Map[SymbolicAccess, TwoElement]], - dsg: Option[DSG] + locals: Option[Map[Procedure, DSG]], + bus: Option[Map[Procedure, DSG]], + tds: Option[Map[Procedure, DSG]], ) /** Results of the main program execution. 
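Note on the hunk above: the single `dsg: Option[DSG]` result is split into three per-procedure maps, one per DSA phase (local, bottom-up, top-down), which the new LocalTest cases later in this patch read individually. A minimal sketch of how a consumer might pick a graph for one procedure, assuming only the field names introduced in this hunk (`locals`, `bus`, `tds`) and the existing `Procedure`/`DSG` types:

    // Hypothetical helper for illustration only; field names follow the hunk above.
    def dsgFor(results: StaticAnalysisContext, proc: Procedure): Option[DSG] =
      // Prefer the most refined (top-down) graph, then bottom-up, then the local graph.
      results.tds.flatMap(_.get(proc))
        .orElse(results.bus.flatMap(_.get(proc)))
        .orElse(results.locals.flatMap(_.get(proc)))

Keeping all three maps, rather than only the final top-down result, lets the tests assert on each phase separately.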
@@ -711,7 +713,10 @@ object StaticAnalysis { memoryRegionContents = memoryRegionContents, reachingDefs = reachingDefinitionsAnalysisResults, symbolicAccessess = symResults, - dsg = None, + locals = None, + bus = None, + tds = None, + reachingDefs = reachingDefinitionsAnalysisResults ) } @@ -958,8 +963,9 @@ object RunUtils { s => writeToFile(toDot(ctx.program), s"${s}_ct.dot") ) - val b = Local(ctx.program.mainProcedure, analysisResult.last.symbolicAccessess, analysisResult.last.IRconstPropResult, ctx.globals, ctx.globalOffsets, ctx.externalFunctions, reachingDefs, writesTo, analysisResult.last.paramResults).analyze() - val c = DSA(ctx.program, analysisResult.last.symbolicAccessess, analysisResult.last.IRconstPropResult, ctx.globals, ctx.globalOffsets, ctx.externalFunctions, reachingDefs, writesTo, analysisResult.last.paramResults).analyze() +// val b = Local(ctx.program.mainProcedure, analysisResult.last.symbolicAccessess, analysisResult.last.IRconstPropResult, ctx.globals, ctx.globalOffsets, ctx.externalFunctions, reachingDefs, writesTo, analysisResult.last.paramResults).analyze() + val dsa = DSA(ctx.program, analysisResult.last.symbolicAccessess, analysisResult.last.IRconstPropResult, ctx.globals, ctx.globalOffsets, ctx.externalFunctions, reachingDefs, writesTo, analysisResult.last.paramResults) + dsa.analyze() Logger.info(s"[!] Finished indirect call resolution after $iteration iterations") StaticAnalysisContext( @@ -974,7 +980,9 @@ object RunUtils { mmmResults = analysisResult.last.mmmResults, memoryRegionContents = analysisResult.last.memoryRegionContents, symbolicAccessess = analysisResult.last.symbolicAccessess, - dsg = Some(b), + locals = Some(dsa.locals.toMap), + bus = Some(dsa.bu.toMap), + tds = Some(dsa.td.toMap), reachingDefs = analysisResult.last.reachingDefs ) } diff --git a/src/test/scala/LocalTest.scala b/src/test/scala/LocalTest.scala index 81be26fe9..2c7bdc62e 100644 --- a/src/test/scala/LocalTest.scala +++ b/src/test/scala/LocalTest.scala @@ -1,4 +1,4 @@ -import analysis.{DSC, DSG, DSN} +import analysis.{DSC, DSG, DSN, DataRegion2, HeapRegion2} import ir.Endian.BigEndian import ir.{BVADD, BinaryExpr, BitVecLiteral, ConvertToSingleProcedureReturn, DirectCall, LocalAssign, Memory, MemoryAssign, MemoryLoad, MemoryStore} import org.scalatest.funsuite.AnyFunSuite @@ -9,6 +9,7 @@ import util.{BASILConfig, BoogieGeneratorConfig, ILLoadingConfig, IRContext, Run class LocalTest extends AnyFunSuite, TestUtil { + // Local DSA tests test("basic pointer") { val results = RunUtils.loadAndTranslate( BASILConfig( @@ -23,20 +24,116 @@ class LocalTest extends AnyFunSuite, TestUtil { outputPrefix = "boogie_out", ) ) - val dsg = results.analysis.get.dsg.get + val program = results.ir.program + val dsg = results.analysis.get.locals.get(program.mainProcedure) + println(dsg.stackMapping) assert(dsg.pointTo.size == 9) - val framePointer = DSC(Some(DSN(None, 0, 1)), 0) // R31 - assert(dsg.pointTo(framePointer).equals(dsg.formals(R29)._1)) - val stack8 = DSC(Some(DSN(None, 0, 2)), 0) // R31 + 8 - assert(dsg.pointTo(stack8).equals(dsg.formals(R30)._1)) - val stack40 = DSC(Some(DSN(None, 0, 3)), 0) // R31 + 40 - val stack32 = DSC(Some(DSN(None, 0, 5)), 0) // R31 + 32 - val stack24 = dsg.pointTo(stack32) // R31 + 24 and Malloc + val framePointer = dsg.stackMapping(0).cells(0) // R31 + assert(dsg.pointTo(framePointer)._1.equals(dsg.formals(R29)._1)) + val stack8 = dsg.stackMapping(8).cells(0) // R31 + 8 + assert(dsg.pointTo(stack8)._1.equals(dsg.formals(R30)._1)) + val stack40 = 
dsg.stackMapping(40).cells(0) // R31 + 40 + val stack32 = dsg.stackMapping(32).cells(0) // R31 + 32 + val stack24 = dsg.stackMapping(24).cells(0) // R31 + 24 and Malloc + assert(dsg.pointTo(stack32)._1.equals(stack24)) assert(stack24.node.get.collapsed) - assert(dsg.pointTo(stack24).equals(stack24)) - assert(dsg.pointTo(stack40).equals(dsg.getPointee(dsg.getPointee(DSC(Some(DSN(None,0, 12)), 0))))) + assert(dsg.pointTo(stack24)._1.equals(stack24)) -// assert(dsg.pointTo.contains(framePointer)) + assert(dsg.pointTo(stack40)._1.equals(dsg.getPointee(dsg.getPointee(dsg.globalMapping((69600, 69600))._1.cells(0))._1)._1)) + + } + + test("unsafe pointer arithmetic") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.adt", + relfFile = "examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.locals.get(program.mainProcedure) + val stack0 = dsg.stackMapping(0).cells(0) + val stack8 = dsg.stackMapping(8).cells(0) + val stack24 = dsg.stackMapping(24).cells(0) + val stack32 = dsg.stackMapping(32).cells(0) + val stack40 = dsg.stackMapping(40).cells(0) + val stack48 = dsg.stackMapping(48).cells(0) + val stack56 = dsg.stackMapping(56).cells(0) + assert(dsg.pointTo.size==9) + assert(dsg.pointTo(stack0).equals(dsg.formals(R29))) + assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) + assert(dsg.pointTo(stack24).equals(dsg.pointTo(stack32))) + assert(dsg.pointTo(stack24)._2 == 0) + assert(dsg.pointTo(stack24)._1.node.get.allocationRegions.size == 1) + assert(dsg.pointTo(stack24)._1.node.get.allocationRegions.head.asInstanceOf[HeapRegion2].size == 20) + assert(dsg.pointTo(stack40)._1.node.get.allocationRegions.size == 1) + assert(dsg.pointTo(stack48)._1.node.get.allocationRegions.head.asInstanceOf[HeapRegion2].size == 8) + assert(dsg.pointTo(dsg.pointTo(stack48)._1.node.get.cells(0)).equals(dsg.pointTo(stack40))) + assert(dsg.pointTo(dsg.pointTo(stack48)._1.node.get.cells(0)).equals(dsg.pointTo(stack56))) + assert(dsg.pointTo(stack24)._1.equals(dsg.pointTo(stack40)._1)) + assert(dsg.pointTo(stack40)._2 == 1) + } + + test("interproc pointer arithmetic main") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", + relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.locals.get(program.mainProcedure) + val stack0 = dsg.stackMapping(0).cells(0) + val stack8 = dsg.stackMapping(8).cells(0) + val stack24 = dsg.stackMapping(24).cells(0) + val stack32 = dsg.stackMapping(32).cells(0) + val stack40 = dsg.stackMapping(40).cells(0) + assert(dsg.pointTo.size == 8) + assert(dsg.pointTo(stack0).equals(dsg.formals(R29))) + assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) + assert(dsg.pointTo(stack24)._1.node.get.equals(dsg.pointTo(stack32)._1.node.get)) + assert(dsg.pointTo(stack24)._1.offset == 0) + assert(dsg.pointTo(stack32)._1.offset == 16) + 
assert(dsg.pointTo.contains(dsg.pointTo(stack40)._1)) + assert(!dsg.pointTo(stack40)._1.node.get.equals(dsg.pointTo(stack24)._1.node.get)) + } + + test("interproc pointer arithmetic callee") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", + relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.locals.get(program.procs("callee")) + val stack8 = dsg.stackMapping(8).cells(0) // R31 + 8 + val stack24 = dsg.stackMapping(24).cells(0) // R31 + 24 + assert(dsg.pointTo.size == 2) + assert(dsg.getPointee(stack8).equals(dsg.formals(R0))) + assert(dsg.getPointee(stack8)._1.offset == 0) + assert(dsg.getPointee(stack24)._1.equals(dsg.formals(R0)._1.node.get.cells(16))) } @@ -61,13 +158,13 @@ class LocalTest extends AnyFunSuite, TestUtil { program = returnUnifier.visitProgram(program) val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Map.empty, Specification(Set(), Map(), List(), List(), List(), Set()), program)) - val dsg: DSG = results.dsg.get + val dsg: DSG = results.locals.get(program.mainProcedure) assert(dsg.formals(R1).equals(dsg.formals(R2))) assert(dsg.varToCell(locAssign1)(R6)._1.equals(dsg.varToCell(locAssign2)(R7)._1)) assert(dsg.varToCell(locAssign1)(R6)._2 == 0) assert(dsg.varToCell(locAssign2)(R7)._2 == 1) assert(dsg.pointTo.contains(dsg.varToCell(locAssign1)(R6)._1)) - assert(dsg.pointTo(dsg.varToCell(locAssign1)(R6)._1).equals(dsg.formals(R1)._1)) + assert(dsg.pointTo(dsg.varToCell(locAssign1)(R6)._1)._1.equals(dsg.formals(R1)._1)) assert(dsg.pointTo.size == 1) } @@ -95,7 +192,7 @@ class LocalTest extends AnyFunSuite, TestUtil { program = returnUnifier.visitProgram(program) val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Map.empty, Specification(Set(), Map(), List(), List(), List(), Set()), program)) - val dsg: DSG = results.dsg.get + val dsg: DSG = results.locals.get(program.mainProcedure) assert(dsg.varToCell(locAssign3)(R5)._1.offset == 13) } @@ -123,7 +220,7 @@ class LocalTest extends AnyFunSuite, TestUtil { program = returnUnifier.visitProgram(program) val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Map.empty, Specification(Set(), Map(), List(), List(), List(), Set()), program)) - val dsg: DSG = results.dsg.get + val dsg: DSG = results.locals.get(program.mainProcedure) assert(dsg.formals(R1).equals(dsg.formals(R2))) assert(dsg.varToCell(locAssign1)(R6)._1.equals(dsg.varToCell(locAssign2)(R7)._1)) assert(dsg.varToCell(locAssign1)(R6)._1.equals(dsg.varToCell(locAssign3)(R5)._1)) @@ -131,7 +228,7 @@ class LocalTest extends AnyFunSuite, TestUtil { assert(dsg.varToCell(locAssign2)(R7)._2 == 1) assert(dsg.varToCell(locAssign3)(R5)._2 == 8) assert(dsg.pointTo.contains(dsg.varToCell(locAssign1)(R6)._1)) - assert(dsg.pointTo(dsg.varToCell(locAssign1)(R6)._1).equals(dsg.formals(R1)._1)) + assert(dsg.pointTo(dsg.varToCell(locAssign1)(R6)._1)._1.equals(dsg.formals(R1)._1)) assert(dsg.pointTo.size == 1) } @@ -159,7 +256,134 @@ class LocalTest extends AnyFunSuite, TestUtil { program = 
returnUnifier.visitProgram(program) val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Map.empty, Specification(Set(), Map(), List(), List(), List(), Set()), program)) - val dsg: DSG = results.dsg.get + val dsg: DSG = results.locals.get(program.mainProcedure) assert(dsg.varToCell(locAssign2)(R7).equals(dsg.varToCell(locAssign3)(R5))) } + + // bottom up tests + test("bottom up interproc pointer arithmetic callee") { + // same as interproc pointer arithmetic callee's local graph (no changes should have been made) + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", + relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.bus.get(program.procs("callee")) + val stack8 = dsg.stackMapping(8).cells(0) // R31 + 8 + val stack24 = dsg.stackMapping(24).cells(0) // R31 + 24 + assert(dsg.pointTo.size == 2) + assert(dsg.getPointee(stack8).equals(dsg.formals(R0))) + assert(dsg.getPointee(stack8)._1.offset == 0) + assert(dsg.getPointee(stack24)._1.equals(dsg.formals(R0)._1.node.get.cells(16))) + } + + + test("bottom up interproc pointer arithmetic main") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", + relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.bus.get(program.mainProcedure) + val stack0 = dsg.stackMapping(0).cells(0) + val stack8 = dsg.stackMapping(8).cells(0) + val stack24 = dsg.stackMapping(24).cells(0) + val stack32 = dsg.stackMapping(32).cells(0) + val stack40 = dsg.stackMapping(40).cells(0) + assert(dsg.pointTo.size == 8) + assert(dsg.pointTo(stack0).equals(dsg.formals(R29))) + assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) + assert(dsg.pointTo(stack24)._1.node.get.equals(dsg.pointTo(stack32)._1.node.get)) + assert(dsg.pointTo(stack24)._1.offset == 0) + assert(dsg.pointTo(stack32)._1.offset == 16) + assert(dsg.pointTo.contains(dsg.pointTo(stack40)._1)) + assert(dsg.pointTo(stack40)._1.node.get.equals(dsg.pointTo(stack24)._1.node.get)) + assert(dsg.pointTo(stack40)._1.offset == 32) + assert(dsg.pointTo(stack40)._2 == 0) + assert(dsg.pointTo(stack32)._2 == 0) + assert(dsg.pointTo(stack24)._2 == 0) + } + + + // top down tests + test("top down interproc pointer arithmetic callee") { + // same as interproc pointer arithmetic callee's local graph (no changes should have been made) + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", + relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = 
results.ir.program + val dsg = results.analysis.get.tds.get(program.procs("callee")) + val stack8 = dsg.stackMapping(8).cells(0) // R31 + 8 + val stack24 = dsg.stackMapping(24).cells(0) // R31 + 24 + assert(dsg.pointTo.size == 5) + assert(dsg.getPointee(stack8).equals(dsg.formals(R0))) + assert(dsg.getPointee(stack8)._1.offset == 16) + assert(dsg.getPointee(stack24)._1.equals(dsg.formals(R0)._1.node.get.cells(32))) + } + + + // top down phase should be the same as bu phase + test("top down interproc pointer arithmetic main") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", + relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.tds.get(program.mainProcedure) + val stack0 = dsg.stackMapping(0).cells(0) + val stack8 = dsg.stackMapping(8).cells(0) + val stack24 = dsg.stackMapping(24).cells(0) + val stack32 = dsg.stackMapping(32).cells(0) + val stack40 = dsg.stackMapping(40).cells(0) + assert(dsg.pointTo.size == 8) + assert(dsg.pointTo(stack0).equals(dsg.formals(R29))) + assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) + assert(dsg.pointTo(stack24)._1.node.get.equals(dsg.pointTo(stack32)._1.node.get)) + assert(dsg.pointTo(stack24)._1.offset == 0) + assert(dsg.pointTo(stack32)._1.offset == 16) + assert(dsg.pointTo.contains(dsg.pointTo(stack40)._1)) + assert(dsg.pointTo(stack40)._1.node.get.equals(dsg.pointTo(stack24)._1.node.get)) + assert(dsg.pointTo(stack40)._1.offset == 32) + assert(dsg.pointTo(stack40)._2 == 0) + assert(dsg.pointTo(stack32)._2 == 0) + assert(dsg.pointTo(stack24)._2 == 0) + } + } From d4f97f33a67aec189bac570885eca41031b6cd17 Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Wed, 22 May 2024 16:23:06 +1000 Subject: [PATCH 012/104] handling relocations --- src/main/scala/analysis/DSA.scala | 101 +-- src/main/scala/analysis/DSAUtility.scala | 104 +-- src/main/scala/analysis/Local.scala | 90 +-- .../analysis/SymbolicAccessAnalysis.scala | 4 +- src/main/scala/util/RunUtils.scala | 33 +- src/test/scala/LocalTest.scala | 618 +++++++++++++++++- 6 files changed, 779 insertions(+), 171 deletions(-) diff --git a/src/main/scala/analysis/DSA.scala b/src/main/scala/analysis/DSA.scala index 281a38c38..78d8e2454 100644 --- a/src/main/scala/analysis/DSA.scala +++ b/src/main/scala/analysis/DSA.scala @@ -38,10 +38,22 @@ class DSA(program: Program, val queue = mutable.Queue[Procedure]() override def analyze(): Map[Procedure, DSG] = { - val domain = computeDomain(CallGraph, Set(program.mainProcedure)) + var domain : Set[Procedure] = Set(program.mainProcedure) + val stack : mutable.Stack[Procedure] = mutable.Stack() + stack.pushAll(program.mainProcedure.calls) + + while stack.nonEmpty do + val current = stack.pop() + domain += current + stack.pushAll(current.calls.diff(domain)) + + +// computeDomain(CallGraph, Set(program.mainProcedure)) domain.foreach( proc => val dsg = Local(proc, symResults, constProp, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params).analyze() +// println(s"Node Counter before local for ${proc.name} : " + NodeCounter.counter) + locals.update(proc, dsg) bu.update(proc, dsg.cloneSelf()) ) @@ -52,10 +64,12 @@ class DSA(program: Program, proc => 
assert(locals(proc).callsites.isEmpty) visited += proc - queue.enqueueAll(CallGraph.pred(proc).diff(visited)) + val preds : Set[Procedure] = CallGraph.pred(proc) + queue.enqueueAll(CallGraph.pred(proc).diff(visited).intersect(domain)) // CallGraph.pred(proc).foreach(buildBUQueue) ) +// println("Node Counter before bottom up: " + NodeCounter.counter) while queue.nonEmpty do val proc = queue.dequeue() visited += proc @@ -66,7 +80,14 @@ class DSA(program: Program, callSite => val callee = callSite.proc val calleeGraph = locals(callee) //.cloneSelf() + assert(buGraph.globalMapping.keySet.equals(calleeGraph.globalMapping.keySet)) assert(calleeGraph.formals.keySet.diff(ignoreRegisters).equals(callSite.paramCells.keySet)) + + calleeGraph.globalMapping.foreach { + case (range: (BigInt, BigInt), (node: DSN, internal: BigInt)) => + node.cloneNode(calleeGraph, buGraph) + } + calleeGraph.formals.foreach{ case (variable: Variable, (cell: DSC, internalOffset: BigInt)) if !ignoreRegisters.contains(variable) => assert(callSite.paramCells.contains(variable)) @@ -88,8 +109,15 @@ class DSA(program: Program, } ) - // TODO // assert(calleeGraph.formals.isEmpty || buGraph.varToCell(begin(callee)).equals(calleeGraph.formals)) + val globalNodes: mutable.Map[Int, DSN] = mutable.Map() + calleeGraph.globalMapping.foreach { + case (range: (BigInt, BigInt), (node: DSN, internal: BigInt)) => + buGraph.mergeCells(buGraph.globalMapping(range)._1.getCell(buGraph.globalMapping(range)._2), + node.getCell(internal)) +// node.cloneNode(calleeGraph, buGraph) + } + buGraph.varToCell.getOrElse(begin(callee), Map.empty).foreach{ case (variable: Variable, formal) if !ignoreRegisters.contains(variable) => buGraph.mergeCells(adjust(formal), adjust(callSite.paramCells(variable))) @@ -104,34 +132,9 @@ class DSA(program: Program, case (c: DSC, ret) => buGraph.mergeCells(c, adjust(ret)) } - - returnCells.foreach{ - case (cell: DSC, offset: BigInt) => - calleeGraph.replace(cell, result, 0) - } ) ) -// buGraph.callsites.foreach(//unify nodes -// callSite => -// val callee = callSite.proc -// val calleeGraph = locals(callee) //.cloneSelf() -// calleeGraph.formals.foreach{ -// case (variable: Variable, formal) if !ignoreRegisters.contains(variable) => -// // TODO merges here should update the node in the callee -// buGraph.mergeCells(adjust(formal), adjust(callSite.paramCells(variable))) -// case _ => -// } -// writesTo(callee).foreach( -// reg => -// val returnCells = calleeGraph.getCells(end(callee), reg) -//// assert(returnCells.nonEmpty) -// returnCells.foldLeft(adjust(callSite.returnCells(reg))){ -// // -// case (c: DSC, ret) => -// buGraph.mergeCells(c, adjust(ret)) -// } -// ) -// ) + buGraph.collectNodes // bottom up phase finished // clone bu graphs to top-down graphs domain.foreach( @@ -141,6 +144,9 @@ class DSA(program: Program, queue.enqueue(program.mainProcedure) visited = Set() + +// println("Node Counter before top down: " + NodeCounter.counter) + while queue.nonEmpty do val proc = queue.dequeue() visited += proc @@ -150,6 +156,14 @@ class DSA(program: Program, callSite => val callee = callSite.proc val calleesGraph = td(callee) + assert(callersGraph.globalMapping.keySet.equals(calleesGraph.globalMapping.keySet)) + + callersGraph.globalMapping.foreach { + case (range: (BigInt, BigInt), (node: DSN, internal: BigInt)) => + node.cloneNode(callersGraph, calleesGraph) + } + + callSite.paramCells.foreach{ case (variable: Variable, (cell: DSC, internalOffset: BigInt)) => val node = cell.node.get @@ -162,6 +176,14 @@ class 
DSA(program: Program, node.cloneNode(callersGraph, callersGraph) } + + callersGraph.globalMapping.foreach { + case (range: (BigInt, BigInt), (node: DSN, internal: BigInt)) => + calleesGraph.mergeCells(calleesGraph.globalMapping(range)._1.getCell(calleesGraph.globalMapping(range)._2), + node.getCell(internal)) + // node.cloneNode(calleeGraph, buGraph) + } + callSite.paramCells.keySet.foreach( variable => val paramCells = calleesGraph.getCells(callSite.call, variable) @@ -180,26 +202,7 @@ class DSA(program: Program, } } ) - -// callersGraph.callsites.foreach( -// callSite => -// val callee = callSite.proc -// val calleesGraph = td(callee) -// callSite.paramCells.foreach { -// case (variable: Variable, cell) => -// calleesGraph.mergeCells(adjust(cell), adjust(calleesGraph.formals(variable))) -// } -// -// callSite.returnCells.foreach { -// case (variable: Variable, cell: (DSC, BigInt)) => -// val returnCells = calleesGraph.getCells(end(callee), variable) -// returnCells.foldLeft(adjust(cell)){ -// case (c: DSC, retCell: (DSC, BigInt)) => -// calleesGraph.mergeCells(c, adjust(retCell)) -// } -// } -// ) - + callersGraph.collectNodes td.toMap } diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index c22fe4033..9b18f1fd4 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -1,12 +1,13 @@ package analysis -import ir.{begin, BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, IntraProcIRCursor, Literal, LocalAssign, Memory, MemoryAssign, MemoryLoad, MemoryStore, Procedure, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, computeDomain, toShortString} +import ir.{BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, IntraProcIRCursor, Literal, LocalAssign, Memory, MemoryAssign, MemoryLoad, MemoryStore, Procedure, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, begin, computeDomain, toShortString} import specification.{ExternalFunction, SpecGlobal} +import util.writeToFile import scala.util.control.Breaks.{break, breakable} import java.math.BigInteger +import java.util.StringJoiner import scala.collection.mutable - import scala.collection.mutable object NodeCounter { @@ -15,8 +16,6 @@ object NodeCounter { def getCounter: Int = counter = counter + 1 counter - - } class DSG(val proc: Procedure, @@ -97,36 +96,62 @@ class DSG(val proc: Procedure, // make all globals private val swappedOffsets = globalOffsets.map(_.swap) - val globalMapping: mutable.Map[(BigInt, BigInt), (DSN, BigInt)] = globals.foldLeft(mutable.Map[(BigInt, BigInt), (DSN, BigInt)]()) { - (m, global) => - var address: BigInt = global.address - if swappedOffsets.contains(address) then - address = swappedOffsets(address) + val globalMapping: mutable.Map[(BigInt, BigInt), (DSN, BigInt)] = mutable.Map[(BigInt, BigInt), (DSN, BigInt)]() + globals.foreach( + global => val node = DSN(Some(this), global.size) - node.allocationRegions.add(DataRegion2(global.name, address, global.size)) + node.allocationRegions.add(DataRegion2(global.name, global.address, global.size/8)) node.flags.global = true node.flags.incomplete = true - m + ((address, address + global.size/8) -> (node, 0)) + globalMapping.update((global.address, global.address + global.size/8), (node, 0)) + ) + + globals.foreach( + global => + var address = global.address + breakable { + while swappedOffsets.contains(address) do + val relocatedAddress = swappedOffsets(address) + if relocatedAddress == 
address then + break + + var field: BigInt = 0 + val node: DSN = isGlobal(relocatedAddress) match + case Some(value) => + field = relocatedAddress - value._1._1 + val node = value._2._1 + node.addCell(field, 8) + node + + case None => + val node = DSN(Some(this)) + node.allocationRegions.add(DataRegion2(s"Relocated_$relocatedAddress", relocatedAddress, 8)) + node.flags.global = true + node.flags.incomplete = true + globalMapping.update((relocatedAddress, relocatedAddress + 8), (node, 0)) + node + + pointTo.update(node.cells(field), (isGlobal(address).get._2._1.cells(0), 0)) + address = relocatedAddress + } + ) - } externalFunctions.foreach( external => - var address: BigInt = external.offset - if swappedOffsets.contains(address) then - address = swappedOffsets(address) val node = DSN(Some(this)) - node.allocationRegions.add(DataRegion2(external.name, address, 0)) + node.allocationRegions.add(DataRegion2(external.name, external.offset, 0)) node.flags.global = true node.flags.incomplete = true - globalMapping.update((address, address), (node, 0)) + globalMapping.update((external.offset, external.offset), (node, 0)) ) + // determine if an address is a global and return the corresponding global if it is. def isGlobal(address: BigInt): Option[((BigInt, BigInt), (DSN, BigInt))] = for (elem <- globalMapping) { val range = elem._1 - if address >= range._1 && address <= range._2 then + if address >= range._1 && (address < range._2 || (range._1 == range._2 && range._2 == address)) then return Some(elem) } None @@ -137,7 +162,6 @@ class DSG(val proc: Procedure, m.foreach { case (variable, (cell, offset)) => if cell.equals(oldCell) then -// if cell.node.equals(oldCell.node) && cell.offset + offset == oldCell.offset then m.update(variable, (newCell, offset + internalOffsetChange)) } ) @@ -145,7 +169,6 @@ class DSG(val proc: Procedure, formals.foreach{ case (variable, (cell, offset)) => if cell.equals(oldCell) then -// if cell.node.equals(oldCell.node) && cell.offset + offset == oldCell.offset then formals.update(variable, (newCell, offset + internalOffsetChange)) } @@ -153,7 +176,6 @@ class DSG(val proc: Procedure, pointTo.foreach { case (pointer, (cell: DSC, pointeeInternal: BigInt)) => if cell.equals(oldCell) then -// if cell.node.equals(oldCell.node) && cell.offset + pointeeInternal == oldCell.offset then pointTo.update(pointer, (newCell, pointeeInternal + internalOffsetChange)) } @@ -181,14 +203,12 @@ class DSG(val proc: Procedure, callSite.returnCells.foreach{ case (variable: Variable, (cell: DSC, internal: BigInt)) => if cell.equals(oldCell) then -// if cell.node.equals(oldCell.node) && cell.offset + internal == oldCell.offset then callSite.returnCells.update(variable, (newCell, internal + internalOffsetChange)) } callSite.paramCells.foreach{ case (variable: Variable, (cell: DSC, internal: BigInt)) => if cell.equals(oldCell) then -// if cell.node.equals(oldCell.node) && cell.offset + internal == oldCell.offset then callSite.paramCells.update(variable, (newCell, internal + internalOffsetChange)) } ) @@ -219,6 +239,14 @@ class DSG(val proc: Procedure, else Set(formals(arg)) + def collectNodes = + nodes.clear() + nodes.addAll(formals.values.map(_._1.node.get)) + varToCell.values.foreach( + value => nodes.addAll(value.values.map(_._1.node.get)) + ) + nodes.addAll(stackMapping.values) + nodes.addAll(globalMapping.values.map(_._1)) def collapseNode(node: DSN): Unit = val collapedCell = DSC(Option(node), 0) val e = DSC(None, 0) @@ -283,7 +311,6 @@ class DSG(val proc: Procedure, val (cell2Pointee: DSC, 
pointee2Internal: BigInt) = getPointee(cell2) val result = mergeCells(getPointeeAdjusted(cell1), getPointeeAdjusted(cell2)) assert(pointTo(cell1)._1.equals(result)) - // TODO pointTo.update(cell1, (result,pointee2Internal.max(pointee1Internal))) else pointTo.update(cell1, getPointee(cell2)) @@ -322,12 +349,6 @@ class DSG(val proc: Procedure, pointTo.remove(node1.cells(0)) replace(node1.cells(0), node2.cells(0), 0) node2.cells(0) -// else if cell1.node.get.allocationRegions.isEmpty && cell1.offset == 0 && cell1.node.get.cells.size == 1 && cell1.largestAccessedSize == 0 && // -// !pointTo.contains(cell1) && pointTo.values.foldLeft(true) { -// (condition, cell) => cell != cell1 && condition -// } then -// replace(cell1, cell2, 0) -// cell2 else var delta = cell1.offset - cell2.offset @@ -378,7 +399,7 @@ class DSG(val proc: Procedure, (set, cell) => // replace incoming edges if cell.node.get.equals(node2) then - replace(cell, collapsedCell, delta + cell.offset - offset) // TODO reconsider offsets + replace(cell, collapsedCell, delta + cell.offset - offset) else assert(cell.node.get.equals(node1)) replace(cell, collapsedCell, cell.offset - offset) @@ -391,7 +412,7 @@ class DSG(val proc: Procedure, else set } - // replace outgoing edges TODO might have to move this out after all cells have been processed + // replace outgoing edges if outgoing.size == 1 then pointTo.update(collapsedCell, outgoing.head) else if outgoing.size > 1 then @@ -505,6 +526,7 @@ class DSG(val proc: Procedure, newGraph.globalMapping.update((start, end), (idToNode(node.id), internalOffset)) } + newGraph.pointTo.clear() pointTo.foreach { case (cell1: DSC, (cell2: DSC, internalOffset: BigInt)) => val node1 = cell1.node.get @@ -580,14 +602,6 @@ class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCount var rep: String = "" -// var size: BigInt = region match -// case Some(value) => value match -// case DataRegion2(regionIdentifier, start, size) => size -// case HeapRegion2(regionIdentifier, proc, size) => size -// case StackRegion2(regionIdentifier, proc, size) => size -// case UnknownRegion2(regionIdentifier, proc) => 0 -// case None => 0 - val cells: mutable.Map[BigInt, DSC] = mutable.Map() this.addCell(0, 0) @@ -717,6 +731,16 @@ class CallSite(val call: DirectCall, val graph: DSG) { def unwrapPaddingAndSlicing(expr: Expr): Expr = expr match + case literal: Literal => literal + case Repeat(repeats, body) => Repeat(repeats, unwrapPaddingAndSlicing(body)) + case SignExtend(extension, body) => SignExtend(extension, unwrapPaddingAndSlicing(body)) + case UnaryExpr(op, arg) => UnaryExpr(op, arg) + case BinaryExpr(op, arg1, arg2) => BinaryExpr(op, unwrapPaddingAndSlicing(arg1), unwrapPaddingAndSlicing(arg2)) + case MemoryStore(mem, index, value, endian, size) => + MemoryStore(mem, unwrapPaddingAndSlicing(index), unwrapPaddingAndSlicing(value), endian, size) + case MemoryLoad(mem, index, endian, size) => MemoryLoad(mem, unwrapPaddingAndSlicing(index), endian, size) + case Memory(name, addressSize, valueSize) => expr + case variable: Variable => variable case Extract(end, start, body) /*if start == 0 && end == 32*/ => unwrapPaddingAndSlicing(body) // this may make it unsound case ZeroExtend(extension, body) => unwrapPaddingAndSlicing(body) case _ => expr diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala index c09d650ed..15a9e35c8 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/Local.scala @@ -87,17 +87,6 @@ class Local( else None -// def 
getCells(pos: CFGPosition, arg: Variable): Set[(DSC, BigInt)] = -// if reachingDefs(pos).contains(arg) then -// reachingDefs(pos)(arg).foldLeft(Set[(DSC, BigInt)]()) { -// (s, defintion) => -// s + graph.varToCell(defintion)(arg) -// } -// else -// Set(graph.formals(arg)) - - - /** * Handles unification for instructions of the form R_x = R_y [+ offset] where R_y is a pointer and [+ offset] is optional @@ -176,30 +165,22 @@ class Local( val cs = CallSite(call, graph) graph.callsites.add(cs) cs.paramCells.foreach{ - case (variable: Variable, (cell: DSC, internal: BigInt)) => - // TODO assert(false) - val node = cell.node.get - val adjusted = node.addCell(cell.offset + internal, 0) - visitPointerArithmeticOperation(call, adjusted, variable, 0) + case (variable: Variable, slice: (DSC, BigInt)) => + visitPointerArithmeticOperation(call, adjust(slice), variable, 0) } cs.returnCells.foreach{ - case (variable: Variable, (cell: DSC, internal: BigInt)) => - val returnArgument = graph.varToCell(n)(variable)._1 - val returnArgumenetInternal = graph.varToCell(n)(variable)._2 - val returnArgumentNode = returnArgument.node.get - val adjustedReturnArgument = returnArgumentNode.addCell(returnArgument.offset + returnArgumenetInternal, 0) - val node = cell.node.get - val adjustedCell = node.addCell(cell.offset + internal, 0) - graph.mergeCells(adjustedReturnArgument, adjustedCell) + case (variable: Variable, slice: (DSC,BigInt)) => + val returnArgument = graph.varToCell(n)(variable) + graph.mergeCells(adjust(returnArgument), adjust(slice)) } case LocalAssign(variable, rhs, maybeString) => val expr: Expr = unwrapPaddingAndSlicing(rhs) val lhsCell = adjust(graph.varToCell(n)(variable)) - if isGlobal(expr, n).isDefined then - val global = isGlobal(expr, n).get + if isGlobal(rhs, n).isDefined then + val global = isGlobal(rhs, n).get graph.mergeCells(lhsCell, global) - else if isStack(expr, n).isDefined then // just in case stack can't be recognised in after this assignment - val stack = isStack(expr, n).get + else if isStack(rhs, n).isDefined then // just in case stack can't be recognised in after this assignment + val stack = isStack(rhs, n).get graph.mergeCells(lhsCell, stack) else expr match @@ -263,25 +244,30 @@ class Local( node.flags.unknown = true graph.collapseNode(node) - case MemoryAssign(memory, MemoryStore(mem, index, expr: Expr, endian, size), label) if unwrapPaddingAndSlicing(expr).isInstanceOf[Variable] => // if value is a literal ignore it + case MemoryAssign(memory, MemoryStore(mem, ind, expr: Expr, endian, size), label) if unwrapPaddingAndSlicing(expr).isInstanceOf[Variable] => // if value is a literal ignore it val value: Variable = unwrapPaddingAndSlicing(expr).asInstanceOf[Variable] + val index: Expr = unwrapPaddingAndSlicing(ind) reachingDefs(n)(value).foreach(visit) val byteSize = (size.toDouble/8).ceil.toInt val addressPointee: DSC = - if isGlobal(index, n, byteSize).isDefined then - graph.getPointeeAdjusted(isGlobal(index, n, byteSize).get) - else if isStack(index, n).isDefined then - graph.getPointeeAdjusted(isStack(index, n).get) + if isGlobal(ind, n, byteSize).isDefined then + graph.getPointeeAdjusted(isGlobal(ind, n, byteSize).get) + else if isStack(ind, n).isDefined then + graph.getPointeeAdjusted(isStack(ind, n).get) else index match - case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => -// assert(varToSym(n).contains(arg1)) - val offset = evaluateExpression(arg2, constProp(n)).get.value - visitPointerArithmeticOperation(n, 
DSN(Some(graph)).cells(0), arg1, byteSize, true, offset) - case arg: Variable => -// assert(varToSym(n).contains(arg)) - visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg, byteSize, true) - case _ => ??? + case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => +// assert(varToSym(n).contains(arg1)) + val offset = evaluateExpression(arg2, constProp(n)).get.value + visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg1, byteSize, true, offset) + case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isEmpty=> +// assert(varToSym(n).contains(arg1)) + visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg1, byteSize, true, 0, true) + case arg: Variable => +// assert(varToSym(n).contains(arg)) + visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg, byteSize, true) + case _ => + ??? addressPointee.node.get.flags.modified = true val valueCells = graph.getCells(n, value) @@ -293,28 +279,10 @@ class Local( case _ => } def analyze(): DSG = - val domain = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).reverse + val domain = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString) domain.foreach(visit) -// println(graph.formals) -// val results = graph.varToCell.keys.toSeq.sortBy(_.toShortString) -// results.foreach { -// pos => -// println(pos) -// val tab = " " -// graph.varToCell(pos).foreach { -// case (variable, cell) => -// println(tab + variable.toString + " -> " + cell.toString) -// } -// } -// println(graph.pointTo) -// // collect the nodes in the dsg - graph.nodes.addAll(graph.formals.values.map(_._1.node.get)) - graph.varToCell.values.foreach( - value => graph.nodes.addAll(value.values.map(_._1.node.get)) - ) - graph.nodes.addAll(graph.stackMapping.values) - graph.nodes.addAll(graph.globalMapping.values.map(_._1)) + graph.collectNodes graph } diff --git a/src/main/scala/analysis/SymbolicAccessAnalysis.scala b/src/main/scala/analysis/SymbolicAccessAnalysis.scala index 1579e4155..613308c5e 100644 --- a/src/main/scala/analysis/SymbolicAccessAnalysis.scala +++ b/src/main/scala/analysis/SymbolicAccessAnalysis.scala @@ -25,7 +25,7 @@ case class HeapRegion2(override val regionIdentifier: String, proc: Procedure, s } case class DataRegion2(override val regionIdentifier: String, start: BigInt, size: BigInt) extends MemoryRegion2 { - override def toString: String = s"Data($regionIdentifier, $start)" + override def toString: String = s"Data($regionIdentifier, $start, $size)" } case class UnknownRegion2(override val regionIdentifier: String, proc: Procedure) extends MemoryRegion2 { @@ -85,7 +85,7 @@ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElem def edgesOther(n: CFGPosition)(d: DL): Map[DL, EdgeFunction[TwoElement]] = n match - case LocalAssign(variable, rhs, maybeString) => + case LocalAssign(variable, rhs, maybeString: Option[String]) => val expr = unwrapPaddingAndSlicing(rhs) expr match case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) && arg1.equals(stackPointer) diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 9c2562514..c2fe10db1 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -1,6 +1,6 @@ package util -import java.io.{File, PrintWriter, FileInputStream, BufferedWriter, FileWriter, IOException} +import java.io.{BufferedWriter, File, FileInputStream, FileWriter, IOException, PrintWriter} import 
com.grammatech.gtirb.proto.IR.IR import com.grammatech.gtirb.proto.Module.Module import com.grammatech.gtirb.proto.Section.Section @@ -706,13 +706,13 @@ object StaticAnalysis { IRconstPropResult = newCPResult, memoryRegionResult = mraResult, vsaResult = vsaResult, - interLiveVarsResults = interLiveVarsResults, - paramResults = paramResults, + interLiveVarsResults = Map.empty, + paramResults = Map.empty, steensgaardResults = steensgaardResults, mmmResults = mmm, memoryRegionContents = memoryRegionContents, + symbolicAccessess = Map.empty, reachingDefs = reachingDefinitionsAnalysisResults, - symbolicAccessess = symResults, locals = None, bus = None, tds = None, @@ -956,15 +956,30 @@ object RunUtils { writeToFile(newCFG.toDot(x => x.toString, Output.dotIder), s"${s}_resolvedCFG.dot") } - Logger.info("[!] Running Region Builder") + Logger.info("[!] Running Writes To") val writesTo = WriteToAnalysis(ctx.program).analyze() val reachingDefs = ReachingDefsAnalysis(ctx.program, writesTo).analyze() config.analysisDotPath.foreach( s => writeToFile(toDot(ctx.program), s"${s}_ct.dot") ) -// val b = Local(ctx.program.mainProcedure, analysisResult.last.symbolicAccessess, analysisResult.last.IRconstPropResult, ctx.globals, ctx.globalOffsets, ctx.externalFunctions, reachingDefs, writesTo, analysisResult.last.paramResults).analyze() - val dsa = DSA(ctx.program, analysisResult.last.symbolicAccessess, analysisResult.last.IRconstPropResult, ctx.globals, ctx.globalOffsets, ctx.externalFunctions, reachingDefs, writesTo, analysisResult.last.paramResults) + + Logger.info("[!] Running Symbolic Access Analysis") + val symResults: Map[CFGPosition, Map[SymbolicAccess, TwoElement]] = + SymbolicAccessAnalysis(ctx.program, analysisResult.last.IRconstPropResult).analyze() + config.analysisDotPath.foreach(s => + writeToFile(toDot(ctx.program, symResults.foldLeft(Map(): Map[CFGPosition, String]) { + (m, t) => + m + (t._1 -> t._2.toString) + }), s"${s}_saa.dot") + ) + + + Logger.info("[!] Running Parameter Analysis") + val paramResults = ParamAnalysis(ctx.program).analyze() + + Logger.info("[!] Running DSA Analysis") + val dsa = DSA(ctx.program, symResults, analysisResult.last.IRconstPropResult, ctx.globals, ctx.globalOffsets, ctx.externalFunctions, reachingDefs, writesTo, paramResults) dsa.analyze() Logger.info(s"[!] 
Finished indirect call resolution after $iteration iterations") @@ -975,11 +990,11 @@ object RunUtils { memoryRegionResult = analysisResult.last.memoryRegionResult, vsaResult = analysisResult.last.vsaResult, interLiveVarsResults = analysisResult.last.interLiveVarsResults, - paramResults = analysisResult.last.paramResults, + paramResults = paramResults, //analysisResult.last.paramResults, steensgaardResults = analysisResult.last.steensgaardResults, mmmResults = analysisResult.last.mmmResults, memoryRegionContents = analysisResult.last.memoryRegionContents, - symbolicAccessess = analysisResult.last.symbolicAccessess, + symbolicAccessess = symResults, // analysisResult.last.symbolicAccessess, locals = Some(dsa.locals.toMap), bus = Some(dsa.bu.toMap), tds = Some(dsa.td.toMap), diff --git a/src/test/scala/LocalTest.scala b/src/test/scala/LocalTest.scala index 2c7bdc62e..650826ac6 100644 --- a/src/test/scala/LocalTest.scala +++ b/src/test/scala/LocalTest.scala @@ -26,8 +26,7 @@ class LocalTest extends AnyFunSuite, TestUtil { ) val program = results.ir.program val dsg = results.analysis.get.locals.get(program.mainProcedure) - println(dsg.stackMapping) - assert(dsg.pointTo.size == 9) + assert(dsg.pointTo.size == 12) // 12 val framePointer = dsg.stackMapping(0).cells(0) // R31 assert(dsg.pointTo(framePointer)._1.equals(dsg.formals(R29)._1)) val stack8 = dsg.stackMapping(8).cells(0) // R31 + 8 @@ -39,10 +38,252 @@ class LocalTest extends AnyFunSuite, TestUtil { assert(stack24.node.get.collapsed) assert(dsg.pointTo(stack24)._1.equals(stack24)) - assert(dsg.pointTo(stack40)._1.equals(dsg.getPointee(dsg.getPointee(dsg.globalMapping((69600, 69600))._1.cells(0))._1)._1)) + assert(dsg.pointTo(stack40).equals(dsg.getPointee(dsg.getPointee(dsg.globalMapping((69600, 69600))._1.cells(0))._1))) + + } + +// test("local jumptable2_clang add_two") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/jumptable2/jumptable2_clang.adt", +// relfFile = "examples/jumptable2/jumptable2_clang.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.locals.get(program.procs("add_two")) +// assert(dsg.pointTo.size == 7) +// assert(dsg.stackMapping.isEmpty) +// assert(dsg.pointTo(dsg.globalMapping((69680, 69684))._1.cells(0))._1.node.get.collapsed) +// } +// +// test("local jumptable2_clang add_six") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/jumptable2/jumptable2_clang.adt", +// relfFile = "examples/jumptable2/jumptable2_clang.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.locals.get(program.procs("add_six")) +// assert(dsg.pointTo.size == 7) +// assert(dsg.stackMapping.isEmpty) +// assert(dsg.pointTo(dsg.globalMapping((69680, 69684))._1.cells(0))._1.node.get.collapsed) +// } +// +// test("local jumptable2_clang sub_seven") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/jumptable2/jumptable2_clang.adt", +// relfFile = "examples/jumptable2/jumptable2_clang.relf", +// specFile = 
None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.locals.get(program.procs("sub_seven")) +// assert(dsg.pointTo.size == 7) +// assert(dsg.stackMapping.isEmpty) +// assert(dsg.pointTo(dsg.globalMapping((69680, 69684))._1.cells(0))._1.node.get.collapsed) +// } + + test("local jumptable2 sub_seven") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.locals.get(program.procs("sub_seven")) + assert(dsg.pointTo.size == 9) + assert(dsg.stackMapping.isEmpty) + println(dsg.globalMapping((69648, 69652))._1.cells(0)) + assert(dsg.pointTo(dsg.globalMapping((69648, 69652))._1.cells(0))._1.node.get.collapsed) + + // initial global mappings + assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + + } + + test("local jumptable2 add_six") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.locals.get(program.procs("add_six")) + assert(dsg.pointTo.size == 9) + assert(dsg.stackMapping.isEmpty) + assert(dsg.pointTo(dsg.globalMapping((69648, 69652))._1.cells(0))._1.node.get.collapsed) + + // initial global mappings + assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 
69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + + } + + test("local jumptable2 add_two") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.locals.get(program.procs("add_two")) + assert(dsg.pointTo.size == 9) + assert(dsg.stackMapping.isEmpty) + println(dsg.globalMapping((69648, 69652))._1.cells(0)) + assert(dsg.pointTo(dsg.globalMapping((69648, 69652))._1.cells(0))._1.node.get.collapsed) + + // initial global mappings + assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) } + test("local jumptable2 main") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + + + val program = results.ir.program + val dsg = results.analysis.get.locals.get(program.mainProcedure) + assert(dsg.pointTo.size == 12) // 12 + val framePointer = dsg.stackMapping(0).cells(0) + val stack8 = dsg.stackMapping(8).cells(0) + val stack16 = dsg.stackMapping(16).cells(0) + val stack28 = dsg.stackMapping(28).cells(0) + assert(dsg.pointTo(framePointer).equals(dsg.formals(R29))) + assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) + 
assert(dsg.pointTo(stack16).equals(dsg.formals(R1))) + assert(dsg.pointTo(stack28).equals(dsg.formals(R0))) + + // initial global mappings + assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + + + + // assert(dsg.pointTo.size == 7) + // assert(dsg.stackMapping.isEmpty) + // assert(dsg.pointTo(dsg.globalMapping((69680, 69684))._1.cells(0))._1.node.get.collapsed) + } + + + + ignore("local jumptable2_clang main") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2_clang.adt", + relfFile = "examples/jumptable2/jumptable2_clang.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.locals.get(program.mainProcedure) + print("") +// assert(dsg.pointTo.size == 7) +// assert(dsg.stackMapping.isEmpty) +// assert(dsg.pointTo(dsg.globalMapping((69680, 69684))._1.cells(0))._1.node.get.collapsed) + } + + + + + ignore("interproc unsafe pointer arithmetic") { + // test interproc unification with points-to that have internal offsets into cells + } + + test("unsafe pointer arithmetic") { val results = RunUtils.loadAndTranslate( BASILConfig( @@ -66,7 +307,7 @@ class LocalTest extends AnyFunSuite, TestUtil { val stack40 = dsg.stackMapping(40).cells(0) val stack48 = dsg.stackMapping(48).cells(0) val stack56 = dsg.stackMapping(56).cells(0) - assert(dsg.pointTo.size==9) + assert(dsg.pointTo.size==10) assert(dsg.pointTo(stack0).equals(dsg.formals(R29))) assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) assert(dsg.pointTo(stack24).equals(dsg.pointTo(stack32))) @@ -102,7 +343,7 @@ class LocalTest extends AnyFunSuite, TestUtil { val stack24 = dsg.stackMapping(24).cells(0) val stack32 = dsg.stackMapping(32).cells(0) val stack40 = dsg.stackMapping(40).cells(0) - assert(dsg.pointTo.size == 8) + assert(dsg.pointTo.size == 9) assert(dsg.pointTo(stack0).equals(dsg.formals(R29))) assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) assert(dsg.pointTo(stack24)._1.node.get.equals(dsg.pointTo(stack32)._1.node.get)) @@ -130,7 +371,7 @@ class LocalTest extends AnyFunSuite, TestUtil { val dsg = results.analysis.get.locals.get(program.procs("callee")) val stack8 = dsg.stackMapping(8).cells(0) // R31 + 8 val stack24 = dsg.stackMapping(24).cells(0) 
// R31 + 24 - assert(dsg.pointTo.size == 2) + assert(dsg.pointTo.size == 3) assert(dsg.getPointee(stack8).equals(dsg.formals(R0))) assert(dsg.getPointee(stack8)._1.offset == 0) assert(dsg.getPointee(stack24)._1.equals(dsg.formals(R0)._1.node.get.cells(16))) @@ -261,6 +502,218 @@ class LocalTest extends AnyFunSuite, TestUtil { } // bottom up tests + test("bottom up jumptable2 sub_seven") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.bus.get(program.procs("sub_seven")) + assert(dsg.pointTo.size == 9) + assert(dsg.stackMapping.isEmpty) + println(dsg.globalMapping((69648, 69652))._1.cells(0)) + assert(dsg.pointTo(dsg.globalMapping((69648, 69652))._1.cells(0))._1.node.get.collapsed) + + // initial global mappings + assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + + + } + + test("bottom up jumptable2 add_six") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.bus.get(program.procs("add_six")) + assert(dsg.pointTo.size == 9) + assert(dsg.stackMapping.isEmpty) + println(dsg.globalMapping((69648, 69652))._1.cells(0)) + assert(dsg.pointTo(dsg.globalMapping((69648, 69652))._1.cells(0))._1.node.get.collapsed) + + // initial global mappings + assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 
2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + + } + + test("bottomup jumptable2 add_two") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.bus.get(program.procs("add_two")) + assert(dsg.pointTo.size == 9) + assert(dsg.stackMapping.isEmpty) + println(dsg.globalMapping((69648, 69652))._1.cells(0)) + assert(dsg.pointTo(dsg.globalMapping((69648, 69652))._1.cells(0))._1.node.get.collapsed) + + // initial global mappings + assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + + } + + test("bottom up jumptable2 main") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + + + val program = results.ir.program + val dsg = results.analysis.get.bus.get(program.mainProcedure) + assert(dsg.pointTo.size == 13) // 13 + val framePointer = dsg.stackMapping(0).cells(0) + val stack8 = dsg.stackMapping(8).cells(0) + val stack16 = dsg.stackMapping(16).cells(0) + val stack28 = dsg.stackMapping(28).cells(0) + assert(dsg.pointTo(framePointer).equals(dsg.formals(R29))) + assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) + assert(dsg.pointTo(stack16).equals(dsg.formals(R1))) + 
assert(dsg.pointTo(stack28).equals(dsg.formals(R0))) + + // initial global mappings + assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + + // bu + assert(dsg.pointTo(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))._1.node.get.collapsed) + + } + + +// test("bottom-up jumptable2_clang add_two") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/jumptable2/jumptable2_clang.adt", +// relfFile = "examples/jumptable2/jumptable2_clang.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.bus.get(program.procs("add_two")) +// assert(dsg.pointTo.size == 7) +// assert(dsg.stackMapping.isEmpty) +// assert(dsg.pointTo(dsg.globalMapping((69680, 69684))._1.cells(0))._1.node.get.collapsed) +// } +// +// test("bottom-up jumptable2_clang add_six") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/jumptable2/jumptable2_clang.adt", +// relfFile = "examples/jumptable2/jumptable2_clang.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.bus.get(program.procs("add_six")) +// assert(dsg.pointTo.size == 7) +// assert(dsg.stackMapping.isEmpty) +// assert(dsg.pointTo(dsg.globalMapping((69680, 69684))._1.cells(0))._1.node.get.collapsed) +// } +// +// test("bottom-up jumptable2_clang sub_seven") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/jumptable2/jumptable2_clang.adt", +// relfFile = "examples/jumptable2/jumptable2_clang.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.bus.get(program.procs("sub_seven")) +// assert(dsg.pointTo.size == 7) +// assert(dsg.stackMapping.isEmpty) +// assert(dsg.pointTo(dsg.globalMapping((69680, 
69684))._1.cells(0))._1.node.get.collapsed) +// } + + test("bottom up interproc pointer arithmetic callee") { // same as interproc pointer arithmetic callee's local graph (no changes should have been made) val results = RunUtils.loadAndTranslate( @@ -280,7 +733,7 @@ class LocalTest extends AnyFunSuite, TestUtil { val dsg = results.analysis.get.bus.get(program.procs("callee")) val stack8 = dsg.stackMapping(8).cells(0) // R31 + 8 val stack24 = dsg.stackMapping(24).cells(0) // R31 + 24 - assert(dsg.pointTo.size == 2) + assert(dsg.pointTo.size == 3) assert(dsg.getPointee(stack8).equals(dsg.formals(R0))) assert(dsg.getPointee(stack8)._1.offset == 0) assert(dsg.getPointee(stack24)._1.equals(dsg.formals(R0)._1.node.get.cells(16))) @@ -308,7 +761,7 @@ class LocalTest extends AnyFunSuite, TestUtil { val stack24 = dsg.stackMapping(24).cells(0) val stack32 = dsg.stackMapping(32).cells(0) val stack40 = dsg.stackMapping(40).cells(0) - assert(dsg.pointTo.size == 8) + assert(dsg.pointTo.size == 9) assert(dsg.pointTo(stack0).equals(dsg.formals(R29))) assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) assert(dsg.pointTo(stack24)._1.node.get.equals(dsg.pointTo(stack32)._1.node.get)) @@ -324,6 +777,151 @@ class LocalTest extends AnyFunSuite, TestUtil { // top down tests + test("top down jumptable2 main") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + + val program = results.ir.program + val dsg = results.analysis.get.tds.get(program.mainProcedure) + assert(dsg.pointTo.size == 13) // 13 + val framePointer = dsg.stackMapping(0).cells(0) + val stack8 = dsg.stackMapping(8).cells(0) + val stack16 = dsg.stackMapping(16).cells(0) + val stack28 = dsg.stackMapping(28).cells(0) + assert(dsg.pointTo(framePointer).equals(dsg.formals(R29))) + assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) + assert(dsg.pointTo(stack16).equals(dsg.formals(R1))) + assert(dsg.pointTo(stack28).equals(dsg.formals(R0))) + + // initial global mappings + assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + + // bu + assert(dsg.pointTo(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))._1.node.get.collapsed) + } + + 
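// The top-down assertions below repeat the same navigation pattern as the local and
// bottom-up tests above: look up the node backing a global address range, pick the cell
// at an offset, and follow its outgoing points-to edge. A hypothetical helper sketch,
// assuming only the globalMapping/cells/pointTo API exercised in these tests, makes
// that pattern explicit (it is not part of the test suite itself):
def globalPointee(dsg: DSG, range: (BigInt, BigInt), offset: BigInt): DSC =
  val (node, _) = dsg.globalMapping(range) // node covering the global range, plus its internal offset
  dsg.pointTo(node.cells(offset))._1       // target cell of the edge, ignoring the pointee's internal offset
// e.g. globalPointee(dsg, (69600, 69608), 0) is expected to equal
// dsg.globalMapping((2136, 2136 + 124))._1.cells(0) in the jumptable2 graphs.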
test("top down jumptable2 sub_seven") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.tds.get(program.procs("sub_seven")) + assert(dsg.pointTo.size == 9) + assert(dsg.stackMapping.isEmpty) + println(dsg.globalMapping((69648, 69652))._1.cells(0)) + assert(dsg.pointTo(dsg.globalMapping((69648, 69652))._1.cells(0))._1.node.get.collapsed) + + // initial global mappings + assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + + + } + + test("top down jumptable2 add_six") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.tds.get(program.procs("add_six")) + assert(dsg.pointTo.size == 9) + assert(dsg.stackMapping.isEmpty) + println(dsg.globalMapping((69648, 69652))._1.cells(0)) + assert(dsg.pointTo(dsg.globalMapping((69648, 69652))._1.cells(0))._1.node.get.collapsed) + + // initial global mappings + assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69560, 
69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + + } + + test("top down jumptable2 add_two") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.tds.get(program.procs("add_two")) + assert(dsg.pointTo.size == 9) + assert(dsg.stackMapping.isEmpty) + println(dsg.globalMapping((69648, 69652))._1.cells(0)) + assert(dsg.pointTo(dsg.globalMapping((69648, 69652))._1.cells(0))._1.node.get.collapsed) + + // initial global mappings + assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + + } + test("top down interproc pointer arithmetic callee") { // same as interproc pointer arithmetic callee's local graph (no changes should have been made) val results = RunUtils.loadAndTranslate( @@ -343,7 +941,7 @@ class LocalTest extends AnyFunSuite, TestUtil { val dsg = results.analysis.get.tds.get(program.procs("callee")) val stack8 = dsg.stackMapping(8).cells(0) // R31 + 8 val stack24 = dsg.stackMapping(24).cells(0) // R31 + 24 - assert(dsg.pointTo.size == 5) + assert(dsg.pointTo.size == 6) assert(dsg.getPointee(stack8).equals(dsg.formals(R0))) assert(dsg.getPointee(stack8)._1.offset == 16) assert(dsg.getPointee(stack24)._1.equals(dsg.formals(R0)._1.node.get.cells(32))) @@ -372,7 +970,7 @@ class LocalTest extends AnyFunSuite, TestUtil { val stack24 = dsg.stackMapping(24).cells(0) val stack32 = dsg.stackMapping(32).cells(0) val stack40 = dsg.stackMapping(40).cells(0) - assert(dsg.pointTo.size == 8) + assert(dsg.pointTo.size == 9) assert(dsg.pointTo(stack0).equals(dsg.formals(R29))) assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) assert(dsg.pointTo(stack24)._1.node.get.equals(dsg.pointTo(stack32)._1.node.get)) From fccfa9487a3a1729022b7e3cb7c1db5298acbf6b Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 1 Jul 2024 11:04:15 
+1000 Subject: [PATCH 013/104] IL changes --- src/main/scala/analysis/DSA.scala | 28 ++- src/main/scala/analysis/DSAUtility.scala | 87 ++++---- src/main/scala/analysis/Local.scala | 34 +++- src/main/scala/analysis/ReachingDefs.scala | 6 +- .../analysis/SymbolicAccessAnalysis.scala | 42 ++-- src/main/scala/analysis/WriteToAnalysis.scala | 20 +- src/main/scala/util/RunUtils.scala | 39 ++-- src/test/scala/LocalTest.scala | 191 +++--------------- 8 files changed, 184 insertions(+), 263 deletions(-) diff --git a/src/main/scala/analysis/DSA.scala b/src/main/scala/analysis/DSA.scala index 78d8e2454..fda0ebacf 100644 --- a/src/main/scala/analysis/DSA.scala +++ b/src/main/scala/analysis/DSA.scala @@ -5,6 +5,20 @@ import specification.{ExternalFunction, SpecGlobal} import scala.collection.mutable +/** + * Data Structure Analysis + * Performs all phases of DSA and stores the results in member variables + * local, bottom-up, top-down results in member variables locals, bu and td respectively. + * @param program program to be analysed + * @param symResults result of symbolic access analysis + * @param constProp + * @param globals + * @param globalOffsets + * @param externalFunctions + * @param reachingDefs + * @param writesTo mapping from procedures to registers they change + * @param params mapping from procedures to their parameters + */ class DSA(program: Program, symResults: Map[CFGPosition, Map[SymbolicAccess, TwoElement]], constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], @@ -19,9 +33,9 @@ class DSA(program: Program, val bu: mutable.Map[Procedure, DSG] = mutable.Map() val td: mutable.Map[Procedure, DSG] = mutable.Map() - val stackPointer = Register("R31", BitVecType(64)) - val returnPointer = Register("R30", BitVecType(64)) - val framePointer = Register("R29", BitVecType(64)) + val stackPointer = Register("R31", 64) + val returnPointer = Register("R30", 64) + val framePointer = Register("R29", 64) val ignoreRegisters: Set[Variable] = Set(stackPointer, returnPointer, framePointer) @@ -48,11 +62,9 @@ class DSA(program: Program, stack.pushAll(current.calls.diff(domain)) -// computeDomain(CallGraph, Set(program.mainProcedure)) domain.foreach( proc => val dsg = Local(proc, symResults, constProp, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params).analyze() -// println(s"Node Counter before local for ${proc.name} : " + NodeCounter.counter) locals.update(proc, dsg) bu.update(proc, dsg.cloneSelf()) @@ -66,10 +78,8 @@ class DSA(program: Program, visited += proc val preds : Set[Procedure] = CallGraph.pred(proc) queue.enqueueAll(CallGraph.pred(proc).diff(visited).intersect(domain)) -// CallGraph.pred(proc).foreach(buildBUQueue) ) -// println("Node Counter before bottom up: " + NodeCounter.counter) while queue.nonEmpty do val proc = queue.dequeue() visited += proc @@ -115,7 +125,6 @@ class DSA(program: Program, case (range: (BigInt, BigInt), (node: DSN, internal: BigInt)) => buGraph.mergeCells(buGraph.globalMapping(range)._1.getCell(buGraph.globalMapping(range)._2), node.getCell(internal)) -// node.cloneNode(calleeGraph, buGraph) } buGraph.varToCell.getOrElse(begin(callee), Map.empty).foreach{ @@ -145,7 +154,6 @@ class DSA(program: Program, queue.enqueue(program.mainProcedure) visited = Set() -// println("Node Counter before top down: " + NodeCounter.counter) while queue.nonEmpty do val proc = queue.dequeue() @@ -181,7 +189,6 @@ class DSA(program: Program, case (range: (BigInt, BigInt), (node: DSN, internal: BigInt)) => 
calleesGraph.mergeCells(calleesGraph.globalMapping(range)._1.getCell(calleesGraph.globalMapping(range)._2), node.getCell(internal)) - // node.cloneNode(calleeGraph, buGraph) } callSite.paramCells.keySet.foreach( @@ -200,6 +207,7 @@ class DSA(program: Program, case (c: DSC, retCell: (DSC, BigInt)) => calleesGraph.mergeCells(c, adjust(retCell)) } + case _ => ??? } ) callersGraph.collectNodes diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index 9b18f1fd4..cc71fbcde 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -1,13 +1,10 @@ package analysis -import ir.{BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, IntraProcIRCursor, Literal, LocalAssign, Memory, MemoryAssign, MemoryLoad, MemoryStore, Procedure, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, begin, computeDomain, toShortString} +import ir.{BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, IntraProcIRCursor, Literal, Assign, Memory, MemoryAssign, MemoryLoad, Procedure, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, begin, computeDomain, toShortString} import specification.{ExternalFunction, SpecGlobal} -import util.writeToFile import scala.util.control.Breaks.{break, breakable} import java.math.BigInteger -import java.util.StringJoiner -import scala.collection.mutable import scala.collection.mutable object NodeCounter { @@ -18,6 +15,19 @@ object NodeCounter { counter } + +/** + * Data Structure Graph for DSA + * @param proc procedure of DSG + * @param constProp + * @param varToSym mapping flow-sensitive (position sensitive) mapping from registers to their set of symbolic accesses + * @param globals + * @param globalOffsets + * @param externalFunctions + * @param reachingDefs + * @param writesTo + * @param params + */ class DSG(val proc: Procedure, constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], varToSym: Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]], @@ -32,8 +42,8 @@ class DSG(val proc: Procedure, val pointTo: mutable.Map[DSC, (DSC, BigInt)] = mutable.Map() val callsites: mutable.Set[CallSite] = mutable.Set() - val mallocRegister = Register("R0", BitVecType(64)) - val stackPointer = Register("R31", BitVecType(64)) + val mallocRegister = Register("R0", 64) + val stackPointer = Register("R31", 64) // make stack nodes with val stackMapping: mutable.Map[BigInt, DSN] = @@ -50,14 +60,14 @@ class DSG(val proc: Procedure, varToSym(pos)(arg1).foldLeft(m) { (m, sym) => sym match - case SymbolicAccess(accessor, StackRegion2(regionIdentifier, proc, size), symOffset) => + case SymbolicAccess(accessor, StackLocation(regionIdentifier, proc, size), symOffset) => offset = offset + symOffset if m.contains(offset) then assert(!m(offset).cells(0).growSize(byteSize)) m else val node = DSN(Some(this), byteSize) - node.allocationRegions.add(StackRegion2(pos.toShortString, proc, byteSize)) + node.allocationRegions.add(StackLocation(pos.toShortString, proc, byteSize)) node.flags.stack = true node.addCell(0, byteSize) m + (offset -> node) @@ -67,13 +77,13 @@ class DSG(val proc: Procedure, varToSym(pos)(arg).foldLeft(m) { (m, sym) => sym match - case SymbolicAccess(accessor, StackRegion2(regionIdentifier, proc, size), offset) => + case SymbolicAccess(accessor, StackLocation(regionIdentifier, proc, size), offset) => if m.contains(offset) then assert(!m(offset).cells(0).growSize(byteSize)) m else val node = DSN(Some(this), 
byteSize) - node.allocationRegions.add(StackRegion2(pos.toShortString, proc, byteSize)) + node.allocationRegions.add(StackLocation(pos.toShortString, proc, byteSize)) node.flags.stack = true node.addCell(0, byteSize) m + (offset -> node) @@ -82,12 +92,12 @@ class DSG(val proc: Procedure, case _ => m private def stackBuilder(pos: CFGPosition, m: Map[BigInt, DSN]): Map[BigInt, DSN] = { pos match - case LocalAssign(variable: Variable, expr: Expr, _) => + case Assign(variable: Variable, expr: Expr, _) => expr match case MemoryLoad(mem, index, endian, size) => visitStackAccess(pos, index, size, m) case _ => m - case MemoryAssign(mem, MemoryStore(mem2, index, value, endian, size), label) => + case MemoryAssign(mem, index: Expr, value: Expr, endian, size: Int, label) => visitStackAccess(pos, index, size, m) case _ => m @@ -100,7 +110,7 @@ class DSG(val proc: Procedure, globals.foreach( global => val node = DSN(Some(this), global.size) - node.allocationRegions.add(DataRegion2(global.name, global.address, global.size/8)) + node.allocationRegions.add(DataLocation(global.name, global.address, global.size/8)) node.flags.global = true node.flags.incomplete = true globalMapping.update((global.address, global.address + global.size/8), (node, 0)) @@ -125,7 +135,7 @@ class DSG(val proc: Procedure, case None => val node = DSN(Some(this)) - node.allocationRegions.add(DataRegion2(s"Relocated_$relocatedAddress", relocatedAddress, 8)) + node.allocationRegions.add(DataLocation(s"Relocated_$relocatedAddress", relocatedAddress, 8)) node.flags.global = true node.flags.incomplete = true globalMapping.update((relocatedAddress, relocatedAddress + 8), (node, 0)) @@ -139,7 +149,7 @@ class DSG(val proc: Procedure, externalFunctions.foreach( external => val node = DSN(Some(this)) - node.allocationRegions.add(DataRegion2(external.name, external.offset, 0)) + node.allocationRegions.add(DataLocation(external.name, external.offset, 0)) node.flags.global = true node.flags.incomplete = true globalMapping.update((external.offset, external.offset), (node, 0)) @@ -149,12 +159,16 @@ class DSG(val proc: Procedure, // determine if an address is a global and return the corresponding global if it is. 
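  // For example, given a mapping entry keyed by (69648, 69652), addresses 69648..69651
  // fall inside the half-open range and return that entry, while 69652 does not;
  // zero-width ranges (start == end, as registered for external functions above)
  // match only their exact address.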
def isGlobal(address: BigInt): Option[((BigInt, BigInt), (DSN, BigInt))] = - for (elem <- globalMapping) { - val range = elem._1 - if address >= range._1 && (address < range._2 || (range._1 == range._2 && range._2 == address)) then - return Some(elem) + var global: Option[((BigInt, BigInt), (DSN, BigInt))] = None + breakable { + for (elem <- globalMapping) { + val range = elem._1 + if address >= range._1 && (address < range._2 || (range._1 == range._2 && range._2 == address)) then + global = Some(elem) + break + } } - None + global private def replaceInEV(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = varToCell.foreach( @@ -441,37 +455,32 @@ class DSG(val proc: Procedure, val varToCell: mutable.Map[CFGPosition, mutable.Map[Variable, (DSC, BigInt)]] = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).foldLeft(mutable.Map[CFGPosition, mutable.Map[Variable, (DSC, BigInt)]]()) { (m, pos) => pos match - case LocalAssign(variable, value , label) => + case Assign(variable, value , label) => value.variables.foreach( v => if isFormal(pos, v) then val node = DSN(Some(this)) node.flags.incomplete = true - node.rep = "formal" nodes.add(node) formals.update(v, (node.cells(0), 0)) ) val node = DSN(Some(this)) - node.rep = "ssa" - m + (pos -> mutable.Map(variable -> (node.cells(0), 0))) + m +=(pos -> mutable.Map(variable -> (node.cells(0), 0))) case DirectCall(proc, target, label) if proc.name == "malloc" => val node = DSN(Some(this)) - node.rep = "ssa" - m + (pos -> mutable.Map(mallocRegister -> (node.cells(0), 0))) + m += (pos -> mutable.Map(mallocRegister -> (node.cells(0), 0))) case DirectCall(proc, target, label) if writesTo.contains(proc) => val result: Map[Variable, (DSC, BigInt)] = writesTo(proc).foldLeft(Map[Variable, (DSC, BigInt)]()){ (n, variable) => val node = DSN(Some(this)) - node.rep = "ssa" n + (variable -> (node.cells(0), 0)) } - m + (pos -> result.to(mutable.Map)) - case MemoryAssign(memory, MemoryStore(mem, index, expr: Expr, endian, size), label) if unwrapPaddingAndSlicing(expr).isInstanceOf[Variable] => + m += (pos -> result.to(mutable.Map)) + case MemoryAssign(memory, index: Expr, expr: Expr, endian, size: Int, label) if unwrapPaddingAndSlicing(expr).isInstanceOf[Variable] => val value: Variable = unwrapPaddingAndSlicing(expr).asInstanceOf[Variable] if isFormal(pos, value) then val node = DSN(Some(this)) node.flags.incomplete = true - node.rep = "formal" nodes.add(node) formals.update(value, (node.cells(0), 0)) m @@ -598,9 +607,7 @@ class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCount var flags = Flags() def collapsed = flags.collapsed - val allocationRegions: mutable.Set[MemoryRegion2] = mutable.Set() - - var rep: String = "" + val allocationRegions: mutable.Set[MemoryLocation] = mutable.Set() val cells: mutable.Map[BigInt, DSC] = mutable.Map() this.addCell(0, 0) @@ -700,6 +707,11 @@ class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCount override def toString: String = s"Node($id, $allocationRegions ${if collapsed then ", collapsed" else ""})" } +/** + * a cell in DSA + * @param node the node this cell belongs to + * @param offset the offset of the cell + */ case class DSC(node: Option[DSN], offset: BigInt) { var largestAccessedSize: BigInt = 0 @@ -713,6 +725,11 @@ case class DSC(node: Option[DSN], offset: BigInt) override def toString: String = s"Cell(${if node.isDefined then node.get.toString else "NONE"}, $offset)" } +/** + * represents a direct call in DSA + * @param call instance of the 
call + * @param graph caller's DSG + */ class CallSite(val call: DirectCall, val graph: DSG) { val proc = call.target val paramCells: mutable.Map[Variable, (DSC, BigInt)] = graph.params(proc).foldLeft(mutable.Map[Variable, (DSC, BigInt)]()) { @@ -736,10 +753,7 @@ def unwrapPaddingAndSlicing(expr: Expr): Expr = case SignExtend(extension, body) => SignExtend(extension, unwrapPaddingAndSlicing(body)) case UnaryExpr(op, arg) => UnaryExpr(op, arg) case BinaryExpr(op, arg1, arg2) => BinaryExpr(op, unwrapPaddingAndSlicing(arg1), unwrapPaddingAndSlicing(arg2)) - case MemoryStore(mem, index, value, endian, size) => - MemoryStore(mem, unwrapPaddingAndSlicing(index), unwrapPaddingAndSlicing(value), endian, size) case MemoryLoad(mem, index, endian, size) => MemoryLoad(mem, unwrapPaddingAndSlicing(index), endian, size) - case Memory(name, addressSize, valueSize) => expr case variable: Variable => variable case Extract(end, start, body) /*if start == 0 && end == 32*/ => unwrapPaddingAndSlicing(body) // this may make it unsound case ZeroExtend(extension, body) => unwrapPaddingAndSlicing(body) @@ -780,6 +794,7 @@ def adjust(tuple: (DSC, BigInt)): DSC = val internal = tuple._2 adjust(cell, internal) +// minimum 2's complement 64 bit negative integer val BITVECNEGATIVE: BigInt = new BigInt(new BigInteger("9223372036854775808")) diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala index 15a9e35c8..9faa83a34 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/Local.scala @@ -1,12 +1,24 @@ package analysis -import ir.{BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, IntraProcIRCursor, LocalAssign, MemoryAssign, MemoryLoad, MemoryStore, Procedure, Register, Variable, ZeroExtend, computeDomain, toShortString} +import ir.{BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Endian, Expr, Extract, IntraProcIRCursor, Assign, MemoryAssign, MemoryLoad, Procedure, Register, Variable, ZeroExtend, computeDomain, toShortString} import specification.{ExternalFunction, SpecGlobal} import scala.util.control.Breaks.{break, breakable} import java.math.BigInteger import scala.collection.mutable +/** + * The local phase of Data Structure Analysis + * @param proc procedure to be analysed + * @param symResults result of symbolic access analysis + * @param constProp + * @param globals + * @param globalOffsets + * @param externalFunctions + * @param reachingDefs + * @param writesTo mapping from procedures to registers they change + * @param params mapping from procedures to their parameters + */ class Local( proc: Procedure, symResults: Map[CFGPosition, Map[SymbolicAccess, TwoElement]], @@ -18,8 +30,8 @@ class Local( params: Map[Procedure, Set[Variable]] ) extends Analysis[Any]{ - private val mallocRegister = Register("R0", BitVecType(64)) - private val stackPointer = Register("R31", BitVecType(64)) + private val mallocRegister = Register("R0", 64) + private val stackPointer = Register("R31", 64) private val visited: mutable.Set[CFGPosition] = mutable.Set() @@ -32,9 +44,9 @@ class Local( if m.contains(access._1.accessor) then // every variable pointing to a stack region ONLY has one symbolic access associated with it. 
m(access._1.accessor).foreach( - sym => assert(!sym.symbolicBase.isInstanceOf[StackRegion2]) + sym => assert(!sym.symbolicBase.isInstanceOf[StackLocation]) ) - assert(!access._1.symbolicBase.isInstanceOf[StackRegion2]) + assert(!access._1.symbolicBase.isInstanceOf[StackLocation]) m + (access._1.accessor -> (m(access._1.accessor) + access._1)) else m + (access._1.accessor -> Set(access._1)) @@ -46,7 +58,7 @@ class Local( def isStack(expr: Expr, pos: CFGPosition): Option[DSC] = expr match case BinaryExpr(op, arg1: Variable, arg2) if varToSym.contains(pos) && varToSym(pos).contains(arg1) && - varToSym(pos)(arg1).size == 1 && varToSym(pos)(arg1).head.symbolicBase.isInstanceOf[StackRegion2] && + varToSym(pos)(arg1).size == 1 && varToSym(pos)(arg1).head.symbolicBase.isInstanceOf[StackLocation] && evaluateExpression(arg2, constProp(pos)).isDefined => val offset = evaluateExpression(arg2, constProp(pos)).get.value + varToSym(pos)(arg1).head.offset if graph.stackMapping.contains(offset) then @@ -54,7 +66,7 @@ class Local( else None case arg: Variable if varToSym.contains(pos) && varToSym(pos).contains(arg) && - varToSym(pos)(arg).size == 1 && varToSym(pos)(arg).head.symbolicBase.isInstanceOf[StackRegion2] => + varToSym(pos)(arg).size == 1 && varToSym(pos)(arg).head.symbolicBase.isInstanceOf[StackLocation] => val offset = varToSym(pos)(arg).head.offset if graph.stackMapping.contains(offset) then Some(graph.stackMapping(offset).cells(0)) @@ -158,7 +170,7 @@ class Local( case Some(value) => value.value case None => 0 val node = DSN(Some(graph), size) - node.allocationRegions.add(HeapRegion2(nextMallocCount, proc, size)) + node.allocationRegions.add(HeapLocation(nextMallocCount, proc, size)) node.flags.heap = true graph.mergeCells(graph.varToCell(n)(mallocRegister)._1, node.cells(0)) case call: DirectCall if params.contains(call.target) => @@ -173,7 +185,7 @@ class Local( val returnArgument = graph.varToCell(n)(variable) graph.mergeCells(adjust(returnArgument), adjust(slice)) } - case LocalAssign(variable, rhs, maybeString) => + case Assign(variable: Variable, rhs: Expr, maybeString) => val expr: Expr = unwrapPaddingAndSlicing(rhs) val lhsCell = adjust(graph.varToCell(n)(variable)) if isGlobal(rhs, n).isDefined then @@ -188,7 +200,7 @@ class Local( && evaluateExpression(arg2, constProp(n)).isDefined && evaluateExpression(arg2, constProp(n)).get.value >= BITVECNEGATIVE => val size = twosComplementToDec(decToBinary(evaluateExpression(arg2, constProp(n)).get.value)) val node = DSN(Some(graph)) - node.allocationRegions.add(StackRegion2("Stack_"+proc.name, proc, -size)) + node.allocationRegions.add(StackLocation("Stack_"+proc.name, proc, -size)) node.flags.stack = true graph.mergeCells(lhsCell, node.cells(0)) @@ -244,7 +256,7 @@ class Local( node.flags.unknown = true graph.collapseNode(node) - case MemoryAssign(memory, MemoryStore(mem, ind, expr: Expr, endian, size), label) if unwrapPaddingAndSlicing(expr).isInstanceOf[Variable] => // if value is a literal ignore it + case MemoryAssign(memory, ind: Expr, expr: Expr, endian: Endian, size: Int, label) if unwrapPaddingAndSlicing(expr).isInstanceOf[Variable] => // if value is a literal ignore it val value: Variable = unwrapPaddingAndSlicing(expr).asInstanceOf[Variable] val index: Expr = unwrapPaddingAndSlicing(ind) reachingDefs(n)(value).foreach(visit) diff --git a/src/main/scala/analysis/ReachingDefs.scala b/src/main/scala/analysis/ReachingDefs.scala index ff330f1ee..675f53444 100644 --- a/src/main/scala/analysis/ReachingDefs.scala +++ 
b/src/main/scala/analysis/ReachingDefs.scala @@ -1,17 +1,17 @@ package analysis import analysis.solvers.SimplePushDownWorklistFixpointSolver -import ir.{Assert, Assume, BitVecType, CFGPosition, Call, DirectCall, Expr, GoTo, IndirectCall, InterProcIRCursor, IntraProcIRCursor, LocalAssign, MemoryAssign, NOP, Procedure, Program, Register, Variable, computeDomain} +import ir.{Assert, Assume, BitVecType, CFGPosition, Call, DirectCall, Expr, GoTo, IndirectCall, InterProcIRCursor, IntraProcIRCursor, Assign, MemoryAssign, NOP, Procedure, Program, Register, Variable, computeDomain} abstract class ReachingDefs(program: Program, writesTo: Map[Procedure, Set[Register]]) extends Analysis[Map[CFGPosition, Map[Variable, Set[CFGPosition]]]] { - val mallocRegister = Register("R0", BitVecType(64)) + val mallocRegister = Register("R0", 64) val domain: Set[CFGPosition] = computeDomain(IntraProcIRCursor, program.procedures).toSet val lattice: MapLattice[CFGPosition, Map[Variable, Set[CFGPosition]], MapLattice[Variable, Set[CFGPosition], PowersetLattice[CFGPosition]]] = new MapLattice(new MapLattice(new PowersetLattice[CFGPosition]())) def transfer(n: CFGPosition, s: Map[Variable, Set[CFGPosition]]): Map[Variable, Set[CFGPosition]] = n match - case loc:LocalAssign => + case loc:Assign => s + (loc.lhs -> Set(n)) case DirectCall(proc, target, label) if proc.name == "malloc" => s + (mallocRegister -> Set(n)) diff --git a/src/main/scala/analysis/SymbolicAccessAnalysis.scala b/src/main/scala/analysis/SymbolicAccessAnalysis.scala index 613308c5e..027bf1808 100644 --- a/src/main/scala/analysis/SymbolicAccessAnalysis.scala +++ b/src/main/scala/analysis/SymbolicAccessAnalysis.scala @@ -2,42 +2,48 @@ package analysis import analysis.solvers.ForwardIDESolver import ir.IRWalk.procedure -import ir.{BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, GoTo, IndirectCall, Literal, LocalAssign, Memory, MemoryLoad, MemoryStore, Procedure, Program, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend} +import ir.{BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, GoTo, IndirectCall, Literal, Assign, Memory, MemoryLoad, Procedure, Program, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend} import java.math.BigInteger -case class SymbolicAccess(accessor: Variable, symbolicBase: MemoryRegion2, offset: BigInt) { +case class SymbolicAccess(accessor: Variable, symbolicBase: MemoryLocation, offset: BigInt) { override def toString: String = s"SymbolicAccess($accessor, $symbolicBase, $offset)" } -trait MemoryRegion2 { +trait MemoryLocation { val regionIdentifier: String override def toString: String = s"MemoryRegion($regionIdentifier)" } -case class StackRegion2(override val regionIdentifier: String, proc: Procedure, size: BigInt) extends MemoryRegion2 { +case class StackLocation(override val regionIdentifier: String, proc: Procedure, size: BigInt) extends MemoryLocation { override def toString: String = s"Stack($regionIdentifier, $size)" } -case class HeapRegion2(override val regionIdentifier: String, proc: Procedure, size: BigInt) extends MemoryRegion2 { +case class HeapLocation(override val regionIdentifier: String, proc: Procedure, size: BigInt) extends MemoryLocation { override def toString: String = s"Heap($regionIdentifier, $size)" } -case class DataRegion2(override val regionIdentifier: String, start: BigInt, size: BigInt) extends MemoryRegion2 { +case class DataLocation(override val regionIdentifier: String, start: BigInt, size: BigInt) 
extends MemoryLocation { override def toString: String = s"Data($regionIdentifier, $start, $size)" } -case class UnknownRegion2(override val regionIdentifier: String, proc: Procedure) extends MemoryRegion2 { +case class UnkownLocation(override val regionIdentifier: String, proc: Procedure) extends MemoryLocation { override def toString: String = s"Unknown($regionIdentifier)" } +/** + * environment transformers for SAA or symbolic access analysis + * Combination of reaching definitions and constant propagation + * elements in D are symbolic accesses of the form (variable, symbolic base, concrete offset) + * lattice L is a binary lattice with top and bottom + */ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]]) extends ForwardIDEAnalysis[SymbolicAccess, TwoElement, TwoElementLattice] { - private val stackPointer = Register("R31", BitVecType(64)) - private val linkRegister = Register("R30", BitVecType(64)) - private val framePointer = Register("R29", BitVecType(64)) - private val mallocVariable = Register("R0", BitVecType(64)) + private val stackPointer = Register("R31", 64) + private val linkRegister = Register("R30", 64) + private val framePointer = Register("R29", 64) + private val mallocVariable = Register("R0", 64) var mallocCount: Int = 0 private def nextMallocCount = { @@ -60,7 +66,7 @@ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElem d match case Left(value) => value.symbolicBase match - case StackRegion2(regionIdentifier, parent, size) => Map() + case StackLocation(regionIdentifier, parent, size) => Map() case _ => Map(d -> IdEdge()) case Right(_) => Map(d -> IdEdge()) @@ -68,7 +74,7 @@ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElem d match case Left(value) => value.symbolicBase match - case StackRegion2(regionIdentifier, parent, size) => Map() + case StackLocation(regionIdentifier, parent, size) => Map() case _ => if value.accessor.name == "R29" then Map() @@ -79,13 +85,13 @@ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElem d match case Left(value) => value.symbolicBase match - case StackRegion2(regionIdentifier, parent, size) => Map(d -> IdEdge()) + case StackLocation(regionIdentifier, parent, size) => Map(d -> IdEdge()) case _ => Map() // maps all variables before the call to bottom case Right(_) => Map(d -> IdEdge()) def edgesOther(n: CFGPosition)(d: DL): Map[DL, EdgeFunction[TwoElement]] = n match - case LocalAssign(variable, rhs, maybeString: Option[String]) => + case Assign(variable, rhs, maybeString: Option[String]) => val expr = unwrapPaddingAndSlicing(rhs) expr match case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) && arg1.equals(stackPointer) @@ -95,7 +101,7 @@ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElem case Left(value) => Map(d -> IdEdge()) case Right(_) => val size = twosComplementToDec(decToBinary(evaluateExpression(arg2, constProp(n)).get.value)) - Map(d -> IdEdge(), Left(SymbolicAccess(variable, StackRegion2(s"Stack_${procedure(n).name}", procedure(n), -size), 0)) -> ConstEdge(TwoElementTop)) + Map(d -> IdEdge(), Left(SymbolicAccess(variable, StackLocation(s"Stack_${procedure(n).name}", procedure(n), -size), 0)) -> ConstEdge(TwoElementTop)) case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => d match case Left(value) if value.accessor == arg1 => @@ -121,7 +127,7 @@ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, 
Map[Variable, FlatElem d match case Left(value) if value.accessor == variable => Map() case Left(value) => Map(d -> IdEdge()) - case Right(_) => Map(d -> IdEdge(), Left(SymbolicAccess(variable, UnknownRegion2(nextunknownCount, procedure(n)), 0)) -> ConstEdge(TwoElementTop)) + case Right(_) => Map(d -> IdEdge(), Left(SymbolicAccess(variable, UnkownLocation(nextunknownCount, procedure(n)), 0)) -> ConstEdge(TwoElementTop)) case _ => d match case Left(value) if value.accessor == variable => Map() @@ -134,7 +140,7 @@ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElem val size: BigInt = evaluateExpression(mallocVariable, constProp(n)) match case Some(value) => value.value case None => -1 - Map(d -> IdEdge(), Left(SymbolicAccess(mallocVariable, HeapRegion2(nextMallocCount, procedure(n), size), 0)) -> ConstEdge(TwoElementTop)) + Map(d -> IdEdge(), Left(SymbolicAccess(mallocVariable, HeapLocation(nextMallocCount, procedure(n), size), 0)) -> ConstEdge(TwoElementTop)) case _ => Map(d -> IdEdge()) } diff --git a/src/main/scala/analysis/WriteToAnalysis.scala b/src/main/scala/analysis/WriteToAnalysis.scala index be40f7269..f69447063 100644 --- a/src/main/scala/analysis/WriteToAnalysis.scala +++ b/src/main/scala/analysis/WriteToAnalysis.scala @@ -1,22 +1,22 @@ package analysis -import ir.{Assert, Assume, BitVecType, Call, DirectCall, GoTo, LocalAssign, MemoryAssign, NOP, Procedure, Program, Register} +import ir.{Assert, Assume, BitVecType, Call, DirectCall, GoTo, Assign, MemoryAssign, NOP, Procedure, Program, Register} import scala.collection.mutable class WriteToAnalysis(program: Program) extends Analysis[Map[Procedure, Set[Register]]] { val writesTo: mutable.Map[Procedure, Set[Register]] = mutable.Map() - val mallocRegister = Register("R0", BitVecType(64)) + val mallocRegister = Register("R0", 64) val paramRegisters: Set[Register] = Set( mallocRegister, - Register("R1", BitVecType(64)), - Register("R2", BitVecType(64)), - Register("R3", BitVecType(64)), - Register("R4", BitVecType(64)), - Register("R5", BitVecType(64)), - Register("R6", BitVecType(64)), - Register("R7", BitVecType(64)), + Register("R1", 64), + Register("R2", 64), + Register("R3", 64), + Register("R4", 64), + Register("R5", 64), + Register("R6", 64), + Register("R7", 64), ) def getWritesTos(proc: Procedure): Set[Register] = { @@ -27,7 +27,7 @@ class WriteToAnalysis(program: Program) extends Analysis[Map[Procedure, Set[Regi proc.blocks.foreach( block => block.statements.foreach { - case LocalAssign(variable: Register, value, label) if paramRegisters.contains(variable) => + case Assign(variable: Register, value, label) if paramRegisters.contains(variable) => writtenTo.add(variable) case _ => } diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index c2fe10db1..6e47765e4 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -565,6 +565,7 @@ object StaticAnalysis { config: StaticAnalysisConfig, iteration: Int ): StaticAnalysisContext = { + val before = System.nanoTime() val IRProgram: Program = ctx.program val externalFunctions: Set[ExternalFunction] = ctx.externalFunctions val globals: Set[SpecGlobal] = ctx.globals @@ -585,7 +586,6 @@ object StaticAnalysis { Logger.info("Subroutine Addresses:") Logger.info(subroutines) - // reducible loops val detector = LoopDetector(IRProgram) val foundLoops = detector.identify_loops() @@ -595,6 +595,7 @@ object StaticAnalysis { val newLoops = transformer.llvm_transform() newLoops.foreach(l => 
Logger.info(s"Loop found: ${l.name}")) + config.analysisDotPath.foreach { s => val newCFG = ProgramCfgFactory().fromIR(IRProgram) writeToFile(newCFG.toDot(x => x.toString, Output.dotIder), s"${s}_resolvedCFG-reducible.dot") @@ -605,23 +606,29 @@ object StaticAnalysis { val cfg = ProgramCfgFactory().fromIR(IRProgram) + val domain = computeDomain(IntraProcIRCursor, IRProgram.procedures) Logger.info("[!] Running ANR") val ANRSolver = ANRAnalysisSolver(IRProgram) val ANRResult = ANRSolver.analyze() + Logger.info("[!] Running RNA") val RNASolver = RNAAnalysisSolver(IRProgram) val RNAResult = RNASolver.analyze() + Logger.info("[!] Running Constant Propagation") val constPropSolver = ConstantPropagationSolver(IRProgram) val constPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] = constPropSolver.analyze() + + Logger.info("[!] Running IR Simple Value Analysis") val ilcpsolver = IRSimpleValueAnalysis.Solver(IRProgram) val newCPResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] = ilcpsolver.analyze() + config.analysisResultsPath.foreach(s => writeToFile(printAnalysisResults(IRProgram, newCPResult), s"${s}_new_ir_constprop$iteration.txt") ) @@ -631,9 +638,11 @@ object StaticAnalysis { writeToFile(toDot(dumpdomain, InterProcIRCursor, Map.empty), s"${f}_new_ir_intercfg$iteration.dot") }) + Logger.info("[!] Running Reaching Definitions Analysis") val reachingDefinitionsAnalysisSolver = ReachingDefinitionsAnalysisSolver(IRProgram) val reachingDefinitionsAnalysisResults = reachingDefinitionsAnalysisSolver.analyze() + config.analysisDotPath.foreach(s => { writeToFile( toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> reachingDefinitionsAnalysisResults(b).toString).toMap), @@ -646,6 +655,7 @@ object StaticAnalysis { val regionAccessesAnalysisSolver = RegionAccessesAnalysisSolver(cfg, constPropResult, reachingDefinitionsAnalysisResults) val regionAccessesAnalysisResults = regionAccessesAnalysisSolver.analyze() + config.analysisDotPath.foreach(s => writeToFile(cfg.toDot(Output.labeler(regionAccessesAnalysisResults, true), Output.dotIder), s"${s}_RegTo$iteration.dot")) config.analysisResultsPath.foreach(s => writeToFile(printAnalysisResults(cfg, regionAccessesAnalysisResults, iteration), s"${s}_RegTo$iteration.txt")) @@ -653,10 +663,24 @@ object StaticAnalysis { val constPropSolverWithSSA = ConstantPropagationSolverWithSSA(IRProgram, reachingDefinitionsAnalysisResults) val constPropResultWithSSA = constPropSolverWithSSA.analyze() + Logger.info("[!] Running MRA") val mraSolver = MemoryRegionAnalysisSolver(IRProgram, globalAddresses, globalOffsets, mergedSubroutines, constPropResult, ANRResult, RNAResult, regionAccessesAnalysisResults, reachingDefinitionsAnalysisResults) val mraResult = mraSolver.analyze() + Logger.info("[!] Running MMM") + val mmm = MemoryModelMap() + mmm.convertMemoryRegions(mraResult, mergedSubroutines, globalOffsets, mraSolver.procedureToSharedRegions) + mmm.logRegions() + + + Logger.info("[!] 
Running Steensgaard") + val steensgaardSolver = InterprocSteensgaardAnalysis(IRProgram, constPropResultWithSSA, regionAccessesAnalysisResults, mmm, reachingDefinitionsAnalysisResults, globalOffsets) + steensgaardSolver.analyze() + val steensgaardResults = steensgaardSolver.pointsTo() + val memoryRegionContents = steensgaardSolver.getMemoryRegionContents + mmm.logRegions(memoryRegionContents) + config.analysisDotPath.foreach(s => { writeToFile(dotCallGraph(IRProgram), s"${s}_callgraph$iteration.dot") writeToFile( @@ -675,18 +699,6 @@ object StaticAnalysis { ) }) - Logger.info("[!] Running MMM") - val mmm = MemoryModelMap() - mmm.convertMemoryRegions(mraResult, mergedSubroutines, globalOffsets, mraSolver.procedureToSharedRegions) - mmm.logRegions() - - Logger.info("[!] Running Steensgaard") - val steensgaardSolver = InterprocSteensgaardAnalysis(IRProgram, constPropResultWithSSA, regionAccessesAnalysisResults, mmm, reachingDefinitionsAnalysisResults, globalOffsets) - steensgaardSolver.analyze() - val steensgaardResults = steensgaardSolver.pointsTo() - val memoryRegionContents = steensgaardSolver.getMemoryRegionContents - mmm.logRegions(memoryRegionContents) - Logger.info("[!] Running VSA") val vsaSolver = ValueSetAnalysisSolver(IRProgram, globalAddresses, externalAddresses, globalOffsets, subroutines, mmm, constPropResult) @@ -716,7 +728,6 @@ object StaticAnalysis { locals = None, bus = None, tds = None, - reachingDefs = reachingDefinitionsAnalysisResults ) } diff --git a/src/test/scala/LocalTest.scala b/src/test/scala/LocalTest.scala index 650826ac6..508bd635f 100644 --- a/src/test/scala/LocalTest.scala +++ b/src/test/scala/LocalTest.scala @@ -1,6 +1,6 @@ -import analysis.{DSC, DSG, DSN, DataRegion2, HeapRegion2} +import analysis.{DSC, DSG, DSN, DataLocation, HeapLocation} import ir.Endian.BigEndian -import ir.{BVADD, BinaryExpr, BitVecLiteral, ConvertToSingleProcedureReturn, DirectCall, LocalAssign, Memory, MemoryAssign, MemoryLoad, MemoryStore} +import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, ConvertToSingleProcedureReturn, DirectCall, Memory, MemoryAssign, MemoryLoad, SharedMemory} import org.scalatest.funsuite.AnyFunSuite import test_util.TestUtil import ir.dsl.* @@ -42,69 +42,6 @@ class LocalTest extends AnyFunSuite, TestUtil { } -// test("local jumptable2_clang add_two") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/jumptable2/jumptable2_clang.adt", -// relfFile = "examples/jumptable2/jumptable2_clang.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.locals.get(program.procs("add_two")) -// assert(dsg.pointTo.size == 7) -// assert(dsg.stackMapping.isEmpty) -// assert(dsg.pointTo(dsg.globalMapping((69680, 69684))._1.cells(0))._1.node.get.collapsed) -// } -// -// test("local jumptable2_clang add_six") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/jumptable2/jumptable2_clang.adt", -// relfFile = "examples/jumptable2/jumptable2_clang.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = 
results.analysis.get.locals.get(program.procs("add_six")) -// assert(dsg.pointTo.size == 7) -// assert(dsg.stackMapping.isEmpty) -// assert(dsg.pointTo(dsg.globalMapping((69680, 69684))._1.cells(0))._1.node.get.collapsed) -// } -// -// test("local jumptable2_clang sub_seven") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/jumptable2/jumptable2_clang.adt", -// relfFile = "examples/jumptable2/jumptable2_clang.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.locals.get(program.procs("sub_seven")) -// assert(dsg.pointTo.size == 7) -// assert(dsg.stackMapping.isEmpty) -// assert(dsg.pointTo(dsg.globalMapping((69680, 69684))._1.cells(0))._1.node.get.collapsed) -// } - test("local jumptable2 sub_seven") { val results = RunUtils.loadAndTranslate( BASILConfig( @@ -246,10 +183,6 @@ class LocalTest extends AnyFunSuite, TestUtil { assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) - - // assert(dsg.pointTo.size == 7) - // assert(dsg.stackMapping.isEmpty) - // assert(dsg.pointTo(dsg.globalMapping((69680, 69684))._1.cells(0))._1.node.get.collapsed) } @@ -270,7 +203,6 @@ class LocalTest extends AnyFunSuite, TestUtil { ) val program = results.ir.program val dsg = results.analysis.get.locals.get(program.mainProcedure) - print("") // assert(dsg.pointTo.size == 7) // assert(dsg.stackMapping.isEmpty) // assert(dsg.pointTo(dsg.globalMapping((69680, 69684))._1.cells(0))._1.node.get.collapsed) @@ -313,9 +245,9 @@ class LocalTest extends AnyFunSuite, TestUtil { assert(dsg.pointTo(stack24).equals(dsg.pointTo(stack32))) assert(dsg.pointTo(stack24)._2 == 0) assert(dsg.pointTo(stack24)._1.node.get.allocationRegions.size == 1) - assert(dsg.pointTo(stack24)._1.node.get.allocationRegions.head.asInstanceOf[HeapRegion2].size == 20) + assert(dsg.pointTo(stack24)._1.node.get.allocationRegions.head.asInstanceOf[HeapLocation].size == 20) assert(dsg.pointTo(stack40)._1.node.get.allocationRegions.size == 1) - assert(dsg.pointTo(stack48)._1.node.get.allocationRegions.head.asInstanceOf[HeapRegion2].size == 8) + assert(dsg.pointTo(stack48)._1.node.get.allocationRegions.head.asInstanceOf[HeapLocation].size == 8) assert(dsg.pointTo(dsg.pointTo(stack48)._1.node.get.cells(0)).equals(dsg.pointTo(stack40))) assert(dsg.pointTo(dsg.pointTo(stack48)._1.node.get.cells(0)).equals(dsg.pointTo(stack56))) assert(dsg.pointTo(stack24)._1.equals(dsg.pointTo(stack40)._1)) @@ -379,17 +311,17 @@ class LocalTest extends AnyFunSuite, TestUtil { test("internal merge") { - val mem = Memory("mem", 10000, 10000) - val locAssign1 = LocalAssign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) - val locAssign2 = LocalAssign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) + val mem = SharedMemory("mem", 10000, 10000) + val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) + val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) var program = prog( proc("main", block("operations", -// LocalAssign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), +// Assign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), locAssign1, locAssign2, - MemoryAssign(mem, MemoryStore(mem, R7, R1, BigEndian, 
64), Some("00003")), - MemoryAssign(mem, MemoryStore(mem, R6, R2, BigEndian, 64), Some("00004")), + MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), + MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), ret ) ) @@ -411,18 +343,18 @@ class LocalTest extends AnyFunSuite, TestUtil { } test("offsetting from middle of cell to a new cell") { - val mem = Memory("mem", 10000, 10000) - val locAssign1 = LocalAssign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) - val locAssign2 = LocalAssign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) - val locAssign3 = LocalAssign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(8, 64)), Some("00005")) + val mem = SharedMemory("mem", 10000, 10000) + val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) + val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) + val locAssign3 = Assign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(8, 64)), Some("00005")) var program = prog( proc("main", block("operations", locAssign1, locAssign2, - MemoryAssign(mem, MemoryStore(mem, R7, R1, BigEndian, 64), Some("00003")), - MemoryAssign(mem, MemoryStore(mem, R6, R2, BigEndian, 64), Some("00004")), + MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), + MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), locAssign3, ret ) @@ -438,19 +370,19 @@ class LocalTest extends AnyFunSuite, TestUtil { } test("offsetting from middle of cell to the same cell") { - val mem = Memory("mem", 10000, 10000) - val locAssign1 = LocalAssign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) - val locAssign2 = LocalAssign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) - val locAssign3 = LocalAssign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(7, 64)), Some("00005")) + val mem = SharedMemory("mem", 10000, 10000) + val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) + val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) + val locAssign3 = Assign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(7, 64)), Some("00005")) var program = prog( proc("main", block("operations", - // LocalAssign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), + // Assign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), locAssign1, locAssign2, - MemoryAssign(mem, MemoryStore(mem, R7, R1, BigEndian, 64), Some("00003")), - MemoryAssign(mem, MemoryStore(mem, R6, R2, BigEndian, 64), Some("00004")), + MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), + MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), locAssign3, ret ) @@ -474,19 +406,19 @@ class LocalTest extends AnyFunSuite, TestUtil { } test("internal offset transfer") { - val mem = Memory("mem", 10000, 10000) - val locAssign1 = LocalAssign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) - val locAssign2 = LocalAssign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) - val locAssign3 = LocalAssign(R5, R7, Some("00005")) + val mem = SharedMemory("mem", 10000, 10000) + val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) + val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) + val locAssign3 = Assign(R5, R7, Some("00005")) var program = prog( proc("main", block("operations", - // LocalAssign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), + // Assign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), locAssign1, locAssign2, - MemoryAssign(mem, MemoryStore(mem, 
R7, R1, BigEndian, 64), Some("00003")), - MemoryAssign(mem, MemoryStore(mem, R6, R2, BigEndian, 64), Some("00004")), + MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), + MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), locAssign3, ret ) @@ -650,69 +582,6 @@ class LocalTest extends AnyFunSuite, TestUtil { } -// test("bottom-up jumptable2_clang add_two") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/jumptable2/jumptable2_clang.adt", -// relfFile = "examples/jumptable2/jumptable2_clang.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.bus.get(program.procs("add_two")) -// assert(dsg.pointTo.size == 7) -// assert(dsg.stackMapping.isEmpty) -// assert(dsg.pointTo(dsg.globalMapping((69680, 69684))._1.cells(0))._1.node.get.collapsed) -// } -// -// test("bottom-up jumptable2_clang add_six") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/jumptable2/jumptable2_clang.adt", -// relfFile = "examples/jumptable2/jumptable2_clang.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.bus.get(program.procs("add_six")) -// assert(dsg.pointTo.size == 7) -// assert(dsg.stackMapping.isEmpty) -// assert(dsg.pointTo(dsg.globalMapping((69680, 69684))._1.cells(0))._1.node.get.collapsed) -// } -// -// test("bottom-up jumptable2_clang sub_seven") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/jumptable2/jumptable2_clang.adt", -// relfFile = "examples/jumptable2/jumptable2_clang.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.bus.get(program.procs("sub_seven")) -// assert(dsg.pointTo.size == 7) -// assert(dsg.stackMapping.isEmpty) -// assert(dsg.pointTo(dsg.globalMapping((69680, 69684))._1.cells(0))._1.node.get.collapsed) -// } - test("bottom up interproc pointer arithmetic callee") { // same as interproc pointer arithmetic callee's local graph (no changes should have been made) From c2f5a4b35339374835297aa5c129de15fb9515d2 Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 1 Jul 2024 15:26:32 +1000 Subject: [PATCH 014/104] fixes --- src/main/scala/analysis/Local.scala | 173 ++++++++++-------- .../analysis/SymbolicAccessAnalysis.scala | 43 +++-- .../scala/analysis/solvers/IDESolver.scala | 4 +- src/main/scala/util/RunUtils.scala | 1 - src/test/scala/RegionBuilderTests.scala | 28 --- 5 files changed, 120 insertions(+), 129 deletions(-) delete mode 100644 src/test/scala/RegionBuilderTests.scala diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala index 9faa83a34..d9cf894ee 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/Local.scala @@ -58,13 +58,15 @@ class Local( def isStack(expr: Expr, pos: CFGPosition): Option[DSC] = expr match case BinaryExpr(op, arg1: Variable, 
arg2) if varToSym.contains(pos) && varToSym(pos).contains(arg1) && - varToSym(pos)(arg1).size == 1 && varToSym(pos)(arg1).head.symbolicBase.isInstanceOf[StackLocation] && - evaluateExpression(arg2, constProp(pos)).isDefined => - val offset = evaluateExpression(arg2, constProp(pos)).get.value + varToSym(pos)(arg1).head.offset - if graph.stackMapping.contains(offset) then - Some(graph.stackMapping(offset).cells(0)) - else - None + varToSym(pos)(arg1).size == 1 && varToSym(pos)(arg1).head.symbolicBase.isInstanceOf[StackLocation] => + evaluateExpression(arg2, constProp(pos)) match + case Some(v) => + val offset = v.value + varToSym(pos)(arg1).head.offset + if graph.stackMapping.contains(offset) then + Some(graph.stackMapping(offset).cells(0)) + else + None + case None => None case arg: Variable if varToSym.contains(pos) && varToSym(pos).contains(arg) && varToSym(pos)(arg).size == 1 && varToSym(pos)(arg).head.symbolicBase.isInstanceOf[StackLocation] => val offset = varToSym(pos)(arg).head.offset @@ -86,16 +88,21 @@ class Local( def isGlobal(expr: Expr, pos: CFGPosition, size: Int = 0): Option[DSC] = - if evaluateExpression(expr, constProp(pos)).isDefined && graph.isGlobal(evaluateExpression(expr, constProp(pos)).get.value).isDefined then - val address = evaluateExpression(expr, constProp(pos)).get.value - val ((baseAddress: BigInt, end: BigInt), (node: DSN, internal: BigInt)) = graph.isGlobal(evaluateExpression(expr, constProp(pos)).get.value).get - val offset = address - baseAddress - node.addCell(internal + offset, size) - graph.optionalCollapse(node) - if node.collapsed then - Some(node.cells(0)) + val value = evaluateExpression(expr, constProp(pos)) + if value.isDefined then + val global = graph.isGlobal(value.get.value) + if global.isDefined then + val address = value.get.value + val ((baseAddress: BigInt, end: BigInt), (node: DSN, internal: BigInt)) = global.get + val offset = address - baseAddress + node.addCell(internal + offset, size) + graph.optionalCollapse(node) + if node.collapsed then + Some(node.cells(0)) + else + Some(node.getCell(internal + offset)) else - Some(node.getCell(internal + offset)) + None else None @@ -157,7 +164,29 @@ class Local( graph.optionalCollapse(result.node.get) result.node.get.getCell(result.offset) + def unsupportedPointerArithmeticOperation(n: CFGPosition, expr: Expr, lhsCell: DSC): Unit = { + var containsPointer = false + breakable { + for (v <- expr.variables) { + if varToSym.contains(n) && varToSym(n).contains(v) then + containsPointer = true + break + } + } + if containsPointer then + val cell = expr.variables.foldLeft(lhsCell) { + (c, v) => + val cells: Set[(DSC, BigInt)] = graph.getCells(n, v) + cells.foldLeft(c) { + (c, p) => + graph.mergeCells(c, p._1) + } + } + val node = cell.node.get + node.flags.unknown = true + graph.collapseNode(node) + } def visit(n: CFGPosition): Unit = { if visited.contains(n) then @@ -188,25 +217,29 @@ class Local( case Assign(variable: Variable, rhs: Expr, maybeString) => val expr: Expr = unwrapPaddingAndSlicing(rhs) val lhsCell = adjust(graph.varToCell(n)(variable)) - if isGlobal(rhs, n).isDefined then - val global = isGlobal(rhs, n).get - graph.mergeCells(lhsCell, global) - else if isStack(rhs, n).isDefined then // just in case stack can't be recognised in after this assignment - val stack = isStack(rhs, n).get - graph.mergeCells(lhsCell, stack) + var global = isGlobal(rhs, n) + var stack = isStack(rhs, n) + if global.isDefined then + graph.mergeCells(lhsCell, global.get) + else if stack.isDefined then // just in 
case stack can't be recognised in after this assignment + graph.mergeCells(lhsCell, stack.get) else expr match - case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) && arg1.equals(stackPointer) - && evaluateExpression(arg2, constProp(n)).isDefined && evaluateExpression(arg2, constProp(n)).get.value >= BITVECNEGATIVE => - val size = twosComplementToDec(decToBinary(evaluateExpression(arg2, constProp(n)).get.value)) - val node = DSN(Some(graph)) - node.allocationRegions.add(StackLocation("Stack_"+proc.name, proc, -size)) - node.flags.stack = true - graph.mergeCells(lhsCell, node.cells(0)) + case BinaryExpr(op, arg1: Variable, arg2) => + val arg2Offset = evaluateExpression(arg2, constProp(n)) - case BinaryExpr(op, arg1: Variable, arg2) if /*varToSym.contains(n) && varToSym(n).contains(arg1) && */ evaluateExpression(arg2, constProp(n)).isDefined => - val offset = evaluateExpression(arg2, constProp(n)).get.value - visitPointerArithmeticOperation(n, lhsCell, arg1, 0, false, offset) + if op.equals(BVADD) && arg1.equals(stackPointer) + && arg2Offset.isDefined && arg2Offset.get.value >= BITVECNEGATIVE then + val size = twosComplementToDec(decToBinary(evaluateExpression(arg2, constProp(n)).get.value)) + val node = DSN(Some(graph)) + node.allocationRegions.add(StackLocation("Stack_"+proc.name, proc, -size)) + node.flags.stack = true + graph.mergeCells(lhsCell, node.cells(0)) + else if /*varToSym.contains(n) && varToSym(n).contains(arg1) && */ arg2Offset.isDefined then + val offset = evaluateExpression(arg2, constProp(n)).get.value + visitPointerArithmeticOperation(n, lhsCell, arg1, 0, false, offset) + else + unsupportedPointerArithmeticOperation(n, expr, lhsCell) case arg: Variable /*if varToSym.contains(n) && varToSym(n).contains(arg)*/ => visitPointerArithmeticOperation(n, lhsCell, arg, 0) @@ -214,67 +247,53 @@ class Local( case MemoryLoad(mem, index, endian, size) => val byteSize = (size.toDouble/8).ceil.toInt lhsCell.node.get.flags.read = true - if isGlobal(index, n, byteSize).isDefined then - val global = isGlobal(index, n, byteSize).get - graph.mergeCells(lhsCell, graph.getPointeeAdjusted(global)) - else if isStack(index, n).isDefined then - val stack = isStack(index, n).get - graph.mergeCells(lhsCell, graph.getPointeeAdjusted(stack)) + global = isGlobal(index, n, byteSize) + stack = isStack(index, n) + if global.isDefined then + graph.mergeCells(lhsCell, graph.getPointeeAdjusted(global.get)) + else if stack.isDefined then + graph.mergeCells(lhsCell, graph.getPointeeAdjusted(stack.get)) else index match - case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => -// assert(varToSym(n).contains(arg1)) - val offset = evaluateExpression(arg2, constProp(n)).get.value - visitPointerArithmeticOperation(n, lhsCell, arg1, byteSize, true, offset) - case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isEmpty=> -// assert(varToSym(n).contains(arg1)) - visitPointerArithmeticOperation(n, lhsCell, arg1, byteSize, true, 0, true) + case BinaryExpr(op, arg1: Variable, arg2) => + evaluateExpression(arg2, constProp(n)) match + case Some(v) => +// assert(varToSym(n).contains(arg1)) + val offset = v.value + visitPointerArithmeticOperation(n, lhsCell, arg1, byteSize, true, offset) + case None => +// assert(varToSym(n).contains(arg1)) + visitPointerArithmeticOperation(n, lhsCell, arg1, byteSize, true, 0, true) case arg: Variable => // assert(varToSym(n).contains(arg)) visitPointerArithmeticOperation(n, lhsCell, arg, byteSize, true) case _ 
=> ??? case _ => - var containsPointer = false - breakable { - for (v <- expr.variables) { - if varToSym.contains(n) && varToSym(n).contains(v) then - containsPointer = true - break - } - } - if containsPointer then - val cell = expr.variables.foldLeft(lhsCell) { - (c, v) => - val cells: Set[(DSC, BigInt)] = graph.getCells(n, v) - - cells.foldLeft(c) { - (c, p) => - graph.mergeCells(c, p._1) - } - } - val node = cell.node.get - node.flags.unknown = true - graph.collapseNode(node) + unsupportedPointerArithmeticOperation(n, expr, lhsCell) case MemoryAssign(memory, ind: Expr, expr: Expr, endian: Endian, size: Int, label) if unwrapPaddingAndSlicing(expr).isInstanceOf[Variable] => // if value is a literal ignore it val value: Variable = unwrapPaddingAndSlicing(expr).asInstanceOf[Variable] val index: Expr = unwrapPaddingAndSlicing(ind) reachingDefs(n)(value).foreach(visit) val byteSize = (size.toDouble/8).ceil.toInt + val global = isGlobal(index, n, byteSize) + val stack = isStack(index, n) val addressPointee: DSC = - if isGlobal(ind, n, byteSize).isDefined then - graph.getPointeeAdjusted(isGlobal(ind, n, byteSize).get) - else if isStack(ind, n).isDefined then - graph.getPointeeAdjusted(isStack(ind, n).get) + if global.isDefined then + graph.getPointeeAdjusted(global.get) + else if stack.isDefined then + graph.getPointeeAdjusted(stack.get) else index match - case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => -// assert(varToSym(n).contains(arg1)) - val offset = evaluateExpression(arg2, constProp(n)).get.value - visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg1, byteSize, true, offset) - case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isEmpty=> -// assert(varToSym(n).contains(arg1)) - visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg1, byteSize, true, 0, true) + case BinaryExpr(op, arg1: Variable, arg2) => + evaluateExpression(arg2, constProp(n)) match + case Some(v) => +// assert(varToSym(n).contains(arg1)) + val offset = v.value + visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg1, byteSize, true, offset) + case None => +// assert(varToSym(n).contains(arg1)) + visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg1, byteSize, true, 0, true) case arg: Variable => // assert(varToSym(n).contains(arg)) visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg, byteSize, true) diff --git a/src/main/scala/analysis/SymbolicAccessAnalysis.scala b/src/main/scala/analysis/SymbolicAccessAnalysis.scala index 027bf1808..a3ea99a59 100644 --- a/src/main/scala/analysis/SymbolicAccessAnalysis.scala +++ b/src/main/scala/analysis/SymbolicAccessAnalysis.scala @@ -28,7 +28,7 @@ case class DataLocation(override val regionIdentifier: String, start: BigInt, si override def toString: String = s"Data($regionIdentifier, $start, $size)" } -case class UnkownLocation(override val regionIdentifier: String, proc: Procedure) extends MemoryLocation { +case class UnknownLocation(override val regionIdentifier: String, proc: Procedure) extends MemoryLocation { override def toString: String = s"Unknown($regionIdentifier)" } @@ -94,25 +94,28 @@ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElem case Assign(variable, rhs, maybeString: Option[String]) => val expr = unwrapPaddingAndSlicing(rhs) expr match - case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) && arg1.equals(stackPointer) - && evaluateExpression(arg2, constProp(n)).isDefined && 
evaluateExpression(arg2, constProp(n)).get.value >= BITVECNEGATIVE => - d match - case Left(value) if value.accessor == variable => Map() - case Left(value) => Map(d -> IdEdge()) - case Right(_) => - val size = twosComplementToDec(decToBinary(evaluateExpression(arg2, constProp(n)).get.value)) - Map(d -> IdEdge(), Left(SymbolicAccess(variable, StackLocation(s"Stack_${procedure(n).name}", procedure(n), -size), 0)) -> ConstEdge(TwoElementTop)) - case BinaryExpr(op, arg1: Variable, arg2) if evaluateExpression(arg2, constProp(n)).isDefined => - d match - case Left(value) if value.accessor == arg1 => - val offsetUpdate = evaluateExpression(arg2, constProp(n)).get.value - val result: Map[DL, EdgeFunction[TwoElement]] = Map(Left(SymbolicAccess(variable, value.symbolicBase, value.offset + offsetUpdate)) -> ConstEdge(TwoElementTop)) - if value.accessor != variable then - result + (d -> IdEdge()) + case BinaryExpr(op, arg1: Variable, arg2) => + evaluateExpression(arg2, constProp(n)) match + case Some(v) => + if op.equals(BVADD) && arg1.equals(stackPointer) && v.value >= BITVECNEGATIVE then + d match + case Left(value) if value.accessor == variable => Map() + case Left(value) => Map(d -> IdEdge()) + case Right(_) => + val size = twosComplementToDec(decToBinary(v.value)) + Map(d -> IdEdge(), Left(SymbolicAccess(variable, StackLocation(s"Stack_${procedure(n).name}", procedure(n), -size), 0)) -> ConstEdge(TwoElementTop)) else - result - case Left(value) if value.accessor == variable => Map() - case _ => Map(d -> IdEdge()) + d match + case Left(value) if value.accessor == arg1 => + val offsetUpdate = evaluateExpression(arg2, constProp(n)).get.value + val result: Map[DL, EdgeFunction[TwoElement]] = Map(Left(SymbolicAccess(variable, value.symbolicBase, value.offset + offsetUpdate)) -> ConstEdge(TwoElementTop)) + if value.accessor != variable then + result + (d -> IdEdge()) + else + result + case Left(value) if value.accessor == variable => Map() + case _ => Map(d -> IdEdge()) + case None => Map(d -> IdEdge()) case arg:Variable => d match case Left(value) if value.accessor == arg => @@ -127,7 +130,7 @@ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElem d match case Left(value) if value.accessor == variable => Map() case Left(value) => Map(d -> IdEdge()) - case Right(_) => Map(d -> IdEdge(), Left(SymbolicAccess(variable, UnkownLocation(nextunknownCount, procedure(n)), 0)) -> ConstEdge(TwoElementTop)) + case Right(_) => Map(d -> IdEdge(), Left(SymbolicAccess(variable, UnknownLocation(nextunknownCount, procedure(n)), 0)) -> ConstEdge(TwoElementTop)) case _ => d match case Left(value) if value.accessor == variable => Map() diff --git a/src/main/scala/analysis/solvers/IDESolver.scala b/src/main/scala/analysis/solvers/IDESolver.scala index 1799e36e4..106771317 100644 --- a/src/main/scala/analysis/solvers/IDESolver.scala +++ b/src/main/scala/analysis/solvers/IDESolver.scala @@ -245,9 +245,7 @@ abstract class BackwardIDESolver[D, T, L <: Lattice[T]](program: Program) protected def callToReturn(call: GoTo): DirectCall = call.parent.jump.asInstanceOf[DirectCall] - protected def returnToCall(ret: DirectCall): GoTo = - if ret.parent.fallthrough.isEmpty then - print("") + protected def returnToCall(ret: DirectCall): GoTo = ret.parent.fallthrough.get protected def getCallee(call: GoTo): IndirectCall = callToReturn(call).target.end.asInstanceOf[IndirectCall] diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 6e47765e4..b3cc60bab 100644 --- 
a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -565,7 +565,6 @@ object StaticAnalysis { config: StaticAnalysisConfig, iteration: Int ): StaticAnalysisContext = { - val before = System.nanoTime() val IRProgram: Program = ctx.program val externalFunctions: Set[ExternalFunction] = ctx.externalFunctions val globals: Set[SpecGlobal] = ctx.globals diff --git a/src/test/scala/RegionBuilderTests.scala b/src/test/scala/RegionBuilderTests.scala deleted file mode 100644 index 70b37086b..000000000 --- a/src/test/scala/RegionBuilderTests.scala +++ /dev/null @@ -1,28 +0,0 @@ -import org.scalatest.funsuite.AnyFunSuite -import test_util.TestUtil -import util.{BASILConfig, BoogieGeneratorConfig, ILLoadingConfig, RunUtils, StaticAnalysisConfig} - -class RegionBuilderTests extends AnyFunSuite, TestUtil { - - for (p <- correctPrograms) { - val programPath = correctPath + "/" + p - val variations = getSubdirectories(programPath) - variations.foreach(t => - test("Correct" + "/" + p + "/" + t) { - RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = correctPath + s"/$p/$t/$p.adt", - relfFile = correctPath + s"/$p/$t/$p.relf", - specFile = None, - dumpIL = None - ), - staticAnalysis = Some(StaticAnalysisConfig(None)), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - } - ) - } -} From 75a3bfb3940753636b1d1cd94ace2919dd9ee7bc Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 8 Jul 2024 12:03:36 +1000 Subject: [PATCH 015/104] changed slice tuples to case class --- src/main/scala/analysis/DSA.scala | 20 +-- src/main/scala/analysis/DSAUtility.scala | 154 ++++++++++++----------- src/main/scala/analysis/Local.scala | 8 +- 3 files changed, 96 insertions(+), 86 deletions(-) diff --git a/src/main/scala/analysis/DSA.scala b/src/main/scala/analysis/DSA.scala index fda0ebacf..efd92cdff 100644 --- a/src/main/scala/analysis/DSA.scala +++ b/src/main/scala/analysis/DSA.scala @@ -99,9 +99,9 @@ class DSA(program: Program, } calleeGraph.formals.foreach{ - case (variable: Variable, (cell: DSC, internalOffset: BigInt)) if !ignoreRegisters.contains(variable) => + case (variable: Variable, slice: Slice) if !ignoreRegisters.contains(variable) => assert(callSite.paramCells.contains(variable)) - val node = cell.node.get + val node = slice.node node.cloneNode(calleeGraph, buGraph) case _ => } @@ -113,8 +113,8 @@ class DSA(program: Program, val returnCells = calleeGraph.getCells(end(callee), reg) assert(returnCells.nonEmpty) returnCells.foreach{ - case (cell: DSC, internalOffset: BigInt) => - val node = cell.node.get + case slice: Slice => + val node = slice.node node.cloneNode(calleeGraph, buGraph) } ) @@ -173,14 +173,14 @@ class DSA(program: Program, callSite.paramCells.foreach{ - case (variable: Variable, (cell: DSC, internalOffset: BigInt)) => - val node = cell.node.get + case (variable: Variable, slice: Slice) => + val node = slice.node node.cloneNode(callersGraph, calleesGraph) } callSite.returnCells.foreach{ - case (variable: Variable, (cell: DSC, internalOffset: BigInt)) => - val node = cell.node.get + case (variable: Variable, slice: Slice) => + val node = slice.node node.cloneNode(callersGraph, callersGraph) } @@ -201,10 +201,10 @@ class DSA(program: Program, ) calleesGraph.varToCell.getOrElse(callSite.call, Map.empty).foreach{ - case (variable: Variable, cell: (DSC, BigInt)) => + case (variable: Variable, cell: Slice) => val returnCells = calleesGraph.getCells(end(callee), variable) returnCells.foldLeft(adjust(cell)){ 
- case (c: DSC, retCell: (DSC, BigInt)) => + case (c: DSC, retCell: Slice) => calleesGraph.mergeCells(c, adjust(retCell)) } case _ => ??? diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index cc71fbcde..33641590c 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -39,7 +39,7 @@ class DSG(val proc: Procedure, ) { // DSNodes owned by this graph val nodes: mutable.Set[DSN] = mutable.Set() - val pointTo: mutable.Map[DSC, (DSC, BigInt)] = mutable.Map() + val pointTo: mutable.Map[DSC, Slice] = mutable.Map() val callsites: mutable.Set[CallSite] = mutable.Set() val mallocRegister = Register("R0", 64) @@ -141,7 +141,7 @@ class DSG(val proc: Procedure, globalMapping.update((relocatedAddress, relocatedAddress + 8), (node, 0)) node - pointTo.update(node.cells(field), (isGlobal(address).get._2._1.cells(0), 0)) + pointTo.update(node.cells(field), Slice(isGlobal(address).get._2._1.cells(0), 0)) address = relocatedAddress } ) @@ -174,23 +174,23 @@ class DSG(val proc: Procedure, varToCell.foreach( (pos, m) => m.foreach { - case (variable, (cell, offset)) => - if cell.equals(oldCell) then - m.update(variable, (newCell, offset + internalOffsetChange)) + case (variable, slice) => + if slice.cell.equals(oldCell) then + m.update(variable, Slice(newCell, slice.internalOffset + internalOffsetChange)) } ) formals.foreach{ - case (variable, (cell, offset)) => - if cell.equals(oldCell) then - formals.update(variable, (newCell, offset + internalOffsetChange)) + case (variable, slice) => + if slice.cell.equals(oldCell) then + formals.update(variable, Slice(newCell, slice.internalOffset + internalOffsetChange)) } private def replaceInPointTo(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = pointTo.foreach { - case (pointer, (cell: DSC, pointeeInternal: BigInt)) => - if cell.equals(oldCell) then - pointTo.update(pointer, (newCell, pointeeInternal + internalOffsetChange)) + case (pointer, slice: Slice) => + if slice.cell.equals(oldCell) then + pointTo.update(pointer, Slice(newCell, slice.internalOffset + internalOffsetChange)) } private def replaceInGlobals(oldCell: DSC, newCell: DSC) = @@ -215,15 +215,15 @@ class DSG(val proc: Procedure, callsites.foreach( callSite => callSite.returnCells.foreach{ - case (variable: Variable, (cell: DSC, internal: BigInt)) => - if cell.equals(oldCell) then - callSite.returnCells.update(variable, (newCell, internal + internalOffsetChange)) + case (variable: Variable, slice: Slice) => + if slice.cell.equals(oldCell) then + callSite.returnCells.update(variable, Slice(newCell, slice.internalOffset + internalOffsetChange)) } callSite.paramCells.foreach{ - case (variable: Variable, (cell: DSC, internal: BigInt)) => - if cell.equals(oldCell) then - callSite.paramCells.update(variable, (newCell, internal + internalOffsetChange)) + case (variable: Variable, slice: Slice) => + if slice.cell.equals(oldCell) then + callSite.paramCells.update(variable, Slice(newCell, slice.internalOffset + internalOffsetChange)) } ) @@ -234,19 +234,19 @@ class DSG(val proc: Procedure, replaceInStack(oldCell, newCell) replaceInCallSites(oldCell, newCell, internalOffsetChange) - def getPointee(cell: DSC): (DSC, BigInt) = + def getPointee(cell: DSC): Slice = if !pointTo.contains(cell) then val node = DSN(Some(this)) - pointTo.update(cell, (node.cells(0), 0)) + pointTo.update(cell, Slice(node.cells(0), 0)) pointTo(cell) def getPointeeAdjusted(cell:DSC): DSC = val pointee = getPointee(cell) adjust(pointee) - 
def getCells(pos: CFGPosition, arg: Variable): Set[(DSC, BigInt)] = + def getCells(pos: CFGPosition, arg: Variable): Set[Slice] = if reachingDefs(pos).contains(arg) then - reachingDefs(pos)(arg).foldLeft(Set[(DSC, BigInt)]()) { + reachingDefs(pos)(arg).foldLeft(Set[Slice]()) { (s, defintion) => s + varToCell(defintion)(arg) } @@ -270,12 +270,12 @@ class DSG(val proc: Procedure, (c, field) => if pointTo.contains(field._2) && pointTo(field._2) == field._2 then - pointTo.update(field._2, (collapedCell, 0)) + pointTo.update(field._2, Slice(collapedCell, 0)) c else if pointTo.contains(field._2) then - val (pointeeCell, internalOffset) = getPointee(field._2) - if internalOffset > pointeeInternalOffset then - pointeeInternalOffset = internalOffset + val slice = getPointee(field._2) + if slice.internalOffset > pointeeInternalOffset then + pointeeInternalOffset = slice.internalOffset mergeCells(c, getPointeeAdjusted(field._2)) else c @@ -298,7 +298,7 @@ class DSG(val proc: Procedure, node.cells.clear() node.cells.addOne(0, collapedCell) if cell.node.isDefined then - pointTo.update(node.cells(0), (cell, pointeeInternalOffset)) + pointTo.update(node.cells(0), Slice(cell, pointeeInternalOffset)) def optionalCollapse(node: DSN): Unit = { var lastOffset: BigInt = -1 @@ -321,11 +321,11 @@ class DSG(val proc: Procedure, require(cell1.node.equals(cell2.node) && cell1.offset < cell2.offset) if pointTo.contains(cell2) then if pointTo.contains(cell1) then - val (cell1Pointee: DSC, pointee1Internal: BigInt) = getPointee(cell1) - val (cell2Pointee: DSC, pointee2Internal: BigInt) = getPointee(cell2) + val slice1 = getPointee(cell1) + val slice2 = getPointee(cell2) val result = mergeCells(getPointeeAdjusted(cell1), getPointeeAdjusted(cell2)) assert(pointTo(cell1)._1.equals(result)) - pointTo.update(cell1, (result,pointee2Internal.max(pointee1Internal))) + pointTo.update(cell1, Slice(result, slice2.internalOffset.max(slice1.internalOffset))) else pointTo.update(cell1, getPointee(cell2)) pointTo.remove(cell2) @@ -354,10 +354,10 @@ class DSG(val proc: Procedure, node2.flags.join(node1.flags) if pointTo.contains(node1.cells(0)) then if pointTo.contains(node2.cells(0)) then - val (pointee1: DSC, internal1: BigInt) = getPointee(node1.cells(0)) - val (pointee2: DSC, internal2: BigInt) = getPointee(node2.cells(0)) + val slice1 = getPointee(node1.cells(0)) + val slice2 = getPointee(node2.cells(0)) val result = mergeCells(getPointeeAdjusted(node1.cells(0)), getPointeeAdjusted(node2.cells(0))) - pointTo.update(node2.cells(0), (result, internal1.max(internal2))) + pointTo.update(node2.cells(0), Slice(result, slice1.internalOffset.max(slice2.internalOffset))) else pointTo.update(node2.cells(0), getPointee(node1.cells(0))) pointTo.remove(node1.cells(0)) @@ -409,7 +409,7 @@ class DSG(val proc: Procedure, resultCells.foreach { case (offset: BigInt, (cells: Set[DSC], largestAccess: BigInt)) => val collapsedCell = resultNode.addCell(offset, largestAccess) - val outgoing: Set[(DSC, BigInt)] = cells.foldLeft(Set[(DSC, BigInt)]()){ + val outgoing: Set[Slice] = cells.foldLeft(Set[Slice]()){ (set, cell) => // replace incoming edges if cell.node.get.equals(node2) then @@ -438,7 +438,7 @@ class DSG(val proc: Procedure, internal = internal.max(pointeeInternal) mergeCells(result, cell) } - pointTo.update(collapsedCell, (result, internal)) + pointTo.update(collapsedCell, Slice(result, internal)) } if cell1.offset >= cell2.offset then @@ -451,8 +451,8 @@ class DSG(val proc: Procedure, !reachingDefs(pos).contains(variable) - val formals: 
mutable.Map[Variable, (DSC, BigInt)] = mutable.Map() - val varToCell: mutable.Map[CFGPosition, mutable.Map[Variable, (DSC, BigInt)]] = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).foldLeft(mutable.Map[CFGPosition, mutable.Map[Variable, (DSC, BigInt)]]()) { + val formals: mutable.Map[Variable, Slice] = mutable.Map() + val varToCell: mutable.Map[CFGPosition, mutable.Map[Variable, Slice]] = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).foldLeft(mutable.Map[CFGPosition, mutable.Map[Variable, Slice]]()) { (m, pos) => pos match case Assign(variable, value , label) => @@ -462,18 +462,18 @@ class DSG(val proc: Procedure, val node = DSN(Some(this)) node.flags.incomplete = true nodes.add(node) - formals.update(v, (node.cells(0), 0)) + formals.update(v, Slice(node.cells(0), 0)) ) val node = DSN(Some(this)) - m +=(pos -> mutable.Map(variable -> (node.cells(0), 0))) + m +=(pos -> mutable.Map(variable -> Slice(node.cells(0), 0))) case DirectCall(proc, target, label) if proc.name == "malloc" => val node = DSN(Some(this)) - m += (pos -> mutable.Map(mallocRegister -> (node.cells(0), 0))) + m += (pos -> mutable.Map(mallocRegister -> Slice(node.cells(0), 0))) case DirectCall(proc, target, label) if writesTo.contains(proc) => - val result: Map[Variable, (DSC, BigInt)] = writesTo(proc).foldLeft(Map[Variable, (DSC, BigInt)]()){ + val result: Map[Variable, Slice] = writesTo(proc).foldLeft(Map[Variable, Slice]()){ (n, variable) => val node = DSN(Some(this)) - n + (variable -> (node.cells(0), 0)) + n + (variable -> Slice(node.cells(0), 0)) } m += (pos -> result.to(mutable.Map)) case MemoryAssign(memory, index: Expr, expr: Expr, endian, size: Int, label) if unwrapPaddingAndSlicing(expr).isInstanceOf[Variable] => @@ -482,7 +482,7 @@ class DSG(val proc: Procedure, val node = DSN(Some(this)) node.flags.incomplete = true nodes.add(node) - formals.update(value, (node.cells(0), 0)) + formals.update(value, Slice(node.cells(0), 0)) m case _ => m } @@ -492,28 +492,28 @@ class DSG(val proc: Procedure, assert(formals.size == newGraph.formals.size) val idToNode: mutable.Map[Int, DSN] = mutable.Map() formals.foreach{ - case (variable: Variable, (cell: DSC, internalOffset: BigInt)) => + case (variable: Variable, slice: Slice) => // assert(newGraph.formals.contains(variable)) - val node = cell.node.get + val node = slice.node if !idToNode.contains(node.id) then val newNode = node.cloneSelf(newGraph) idToNode.update(node.id, newNode) - newGraph.formals.update(variable, (idToNode(node.id).cells(cell.offset), internalOffset)) + newGraph.formals.update(variable, Slice(idToNode(node.id).cells(slice.offset), slice.internalOffset)) } varToCell.foreach { - case (position: CFGPosition, values: mutable.Map[Variable, (DSC, BigInt)]) => + case (position: CFGPosition, values: mutable.Map[Variable, Slice]) => // assert(newGraph.varToCell.contains(position)) if !newGraph.varToCell.contains(position) then - newGraph.varToCell.update(position, mutable.Map[Variable, (DSC, BigInt)]()) + newGraph.varToCell.update(position, mutable.Map[Variable, Slice]()) values.foreach{ - case (variable: Variable, (cell: DSC, internalOffset: BigInt)) => + case (variable: Variable, slice: Slice) => // assert(newGraph.varToCell(position).contains(variable)) - val node = cell.node.get + val node = slice.node if !idToNode.contains(node.id) then val newNode = node.cloneSelf(newGraph) idToNode.update(node.id, newNode) - newGraph.varToCell(position).update(variable, (idToNode(node.id).cells(cell.offset), 
internalOffset)) + newGraph.varToCell(position).update(variable, Slice(idToNode(node.id).cells(slice.offset), slice.internalOffset)) } } @@ -537,9 +537,9 @@ class DSG(val proc: Procedure, newGraph.pointTo.clear() pointTo.foreach { - case (cell1: DSC, (cell2: DSC, internalOffset: BigInt)) => + case (cell1: DSC, slice: Slice) => val node1 = cell1.node.get - val node2 = cell2.node.get + val node2 = slice.node if !idToNode.contains(node1.id) then val newNode1 = node1.cloneSelf(newGraph) idToNode.update(node1.id, newNode1) @@ -548,7 +548,7 @@ class DSG(val proc: Procedure, val newNode2 = node2.cloneSelf(newGraph) idToNode.update(node2.id, newNode2) - newGraph.pointTo.update(idToNode(node1.id).cells(cell1.offset), (idToNode(node2.id).cells(cell2.offset), internalOffset)) + newGraph.pointTo.update(idToNode(node1.id).cells(cell1.offset), Slice(idToNode(node2.id).cells(slice.offset), slice.internalOffset)) } callsites.foreach( @@ -557,17 +557,17 @@ class DSG(val proc: Procedure, newGraph.callsites.add(cs) assert(cs.paramCells.keySet.equals(callSite.paramCells.keySet)) callSite.paramCells.foreach{ - case (variable: Variable, (cell: DSC, internal: BigInt)) => + case (variable: Variable, slice: Slice) => assert(cs.paramCells.contains(variable)) - val id = cell.node.get.id - cs.paramCells.update(variable, (idToNode(id).cells(cell.offset), internal)) + val id = slice.node.id + cs.paramCells.update(variable, Slice(idToNode(id).cells(slice.offset), slice.internalOffset)) } callSite.returnCells.foreach{ - case (variable: Variable, (cell: DSC, internal: BigInt)) => + case (variable: Variable, slice: Slice) => assert(cs.returnCells.contains(variable)) - val id = cell.node.get.id - cs.returnCells.update(variable, (idToNode(id).cells(cell.offset), internal)) + val id = slice.node.id + cs.returnCells.update(variable, Slice(idToNode(id).cells(slice.offset), slice.internalOffset)) } ) @@ -669,22 +669,22 @@ class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCount val pos = t._1 val varMap = t._2 varMap.foreach{ - case (variable: Variable, (cell: DSC, internal: BigInt)) => - if cell.node.get.equals(this) then + case (variable: Variable, slice: Slice) => + if slice.node.equals(this) then to.varToCell.update( pos, to.varToCell.getOrElseUpdate(pos, - mutable.Map[Variable, (DSC, BigInt)]()) ++ Map(variable -> (cell, internal)) + mutable.Map[Variable, Slice]()) ++ Map(variable -> slice) ) } ) from.formals.foreach{ - case (variable: Variable, (cell: DSC, internal: BigInt)) => - if cell.node.get.equals(this) then + case (variable: Variable, slice: Slice) => + if slice.node.equals(this) then to.varToCell.update( begin(from.proc), to.varToCell.getOrElseUpdate(begin(from.proc), - mutable.Map[Variable, (DSC, BigInt)]()) ++ Map(variable -> (cell, internal)) + mutable.Map[Variable, Slice]()) ++ Map(variable -> slice) ) } @@ -725,6 +725,15 @@ case class DSC(node: Option[DSN], offset: BigInt) override def toString: String = s"Cell(${if node.isDefined then node.get.toString else "NONE"}, $offset)" } + +/** + * a slice made from a cell and an internal offset + */ +case class Slice(cell: DSC, internalOffset: BigInt) { + def node: DSN = cell.node.get + def offset: BigInt = cell.offset +} + /** * represents a direct call in DSA * @param call instance of the call @@ -732,20 +741,21 @@ case class DSC(node: Option[DSN], offset: BigInt) */ class CallSite(val call: DirectCall, val graph: DSG) { val proc = call.target - val paramCells: mutable.Map[Variable, (DSC, BigInt)] = 
graph.params(proc).foldLeft(mutable.Map[Variable, (DSC, BigInt)]()) { + val paramCells: mutable.Map[Variable, Slice] = graph.params(proc).foldLeft(mutable.Map[Variable, Slice]()) { (m, reg) => val node = DSN(Some(graph)) node.flags.incomplete = true - m += (reg -> (node.cells(0), 0)) + m += (reg -> Slice(node.cells(0), 0)) } - val returnCells: mutable.Map[Variable, (DSC, BigInt)] = graph.writesTo(proc).foldLeft(mutable.Map[Variable, (DSC, BigInt)]()) { + val returnCells: mutable.Map[Variable, Slice] = graph.writesTo(proc).foldLeft(mutable.Map[Variable, Slice]()) { (m, reg) => val node = DSN(Some(graph)) node.flags.incomplete = true - m += (reg -> (node.cells(0), 0)) + m += (reg -> Slice(node.cells(0), 0)) } } + def unwrapPaddingAndSlicing(expr: Expr): Expr = expr match case literal: Literal => literal @@ -789,9 +799,9 @@ def adjust(cell: DSC, internalOffset: BigInt): DSC = val node = cell.node.get node.addCell(cell.offset+internalOffset, 0) -def adjust(tuple: (DSC, BigInt)): DSC = - val cell = tuple._1 - val internal = tuple._2 +def adjust(slice: Slice): DSC = + val cell = slice.cell + val internal = slice.internalOffset adjust(cell, internal) // minimum 2's complement 64 bit negative integer diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala index d9cf894ee..f86cfe500 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/Local.scala @@ -122,7 +122,7 @@ class Local( // visit all the defining pointer operation on rhs variable first reachingDefs(position)(rhs).foreach(visit) // get the cells of all the SSA variables in the set - val cells: Set[(DSC, BigInt)] = graph.getCells(position, rhs) + val cells: Set[Slice] = graph.getCells(position, rhs) // merge the cells or their pointees with lhs var result = cells.foldLeft(lhs) { (c, t) => @@ -176,7 +176,7 @@ class Local( if containsPointer then val cell = expr.variables.foldLeft(lhsCell) { (c, v) => - val cells: Set[(DSC, BigInt)] = graph.getCells(n, v) + val cells: Set[Slice] = graph.getCells(n, v) cells.foldLeft(c) { (c, p) => @@ -206,11 +206,11 @@ class Local( val cs = CallSite(call, graph) graph.callsites.add(cs) cs.paramCells.foreach{ - case (variable: Variable, slice: (DSC, BigInt)) => + case (variable: Variable, slice: Slice) => visitPointerArithmeticOperation(call, adjust(slice), variable, 0) } cs.returnCells.foreach{ - case (variable: Variable, slice: (DSC,BigInt)) => + case (variable: Variable, slice: Slice) => val returnArgument = graph.varToCell(n)(variable) graph.mergeCells(adjust(returnArgument), adjust(slice)) } From f600c6626a04badddbf08b2074ae368775e46d74 Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 8 Jul 2024 14:41:45 +1000 Subject: [PATCH 016/104] separated function entries --- src/main/scala/analysis/DSA.scala | 6 +++--- src/main/scala/analysis/DSAUtility.scala | 6 +++--- src/main/scala/analysis/Local.scala | 6 +++--- .../scala/specification/Specification.scala | 13 +++++++++++-- src/main/scala/translating/ReadELFLoader.scala | 17 +++++++++++------ .../scala/translating/SpecificationLoader.scala | 2 +- src/main/scala/util/RunUtils.scala | 14 ++++++++------ src/test/scala/IndirectCallsTests.scala | 3 ++- src/test/scala/IrreducibleLoop.scala | 2 +- src/test/scala/LocalTest.scala | 8 ++++---- src/test/scala/PointsToTest.scala | 3 ++- src/test/scala/ir/InterpreterTests.scala | 2 +- 12 files changed, 50 insertions(+), 32 deletions(-) diff --git a/src/main/scala/analysis/DSA.scala b/src/main/scala/analysis/DSA.scala index efd92cdff..dfcd983d0 100644 --- 
a/src/main/scala/analysis/DSA.scala +++ b/src/main/scala/analysis/DSA.scala @@ -1,7 +1,7 @@ package analysis -import ir.{begin, BitVecLiteral, BitVecType, CFGPosition, CallGraph, Procedure, Program, Register, Variable, computeDomain, end} -import specification.{ExternalFunction, SpecGlobal} +import ir.{BitVecLiteral, BitVecType, CFGPosition, CallGraph, Procedure, Program, Register, Variable, begin, computeDomain, end} +import specification.{ExternalFunction, SpecGlobal, SymbolTableEntry} import scala.collection.mutable @@ -22,7 +22,7 @@ import scala.collection.mutable class DSA(program: Program, symResults: Map[CFGPosition, Map[SymbolicAccess, TwoElement]], constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - globals: Set[SpecGlobal], globalOffsets: Map[BigInt, BigInt], + globals: Set[SymbolTableEntry], globalOffsets: Map[BigInt, BigInt], externalFunctions: Set[ExternalFunction], reachingDefs: Map[CFGPosition, Map[Variable, Set[CFGPosition]]], writesTo: Map[Procedure, Set[Register]], diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index 33641590c..69c59dabc 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -1,7 +1,7 @@ package analysis -import ir.{BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, IntraProcIRCursor, Literal, Assign, Memory, MemoryAssign, MemoryLoad, Procedure, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, begin, computeDomain, toShortString} -import specification.{ExternalFunction, SpecGlobal} +import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, IntraProcIRCursor, Literal, Memory, MemoryAssign, MemoryLoad, Procedure, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, begin, computeDomain, toShortString} +import specification.{ExternalFunction, SpecGlobal, SymbolTableEntry} import scala.util.control.Breaks.{break, breakable} import java.math.BigInteger @@ -31,7 +31,7 @@ object NodeCounter { class DSG(val proc: Procedure, constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], varToSym: Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]], - globals: Set[SpecGlobal], globalOffsets: Map[BigInt, BigInt], + globals: Set[SymbolTableEntry], globalOffsets: Map[BigInt, BigInt], externalFunctions: Set[ExternalFunction], val reachingDefs: Map[CFGPosition, Map[Variable, Set[CFGPosition]]], val writesTo: Map[Procedure, Set[Register]], diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala index f86cfe500..59e4a5edd 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/Local.scala @@ -1,7 +1,7 @@ package analysis -import ir.{BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Endian, Expr, Extract, IntraProcIRCursor, Assign, MemoryAssign, MemoryLoad, Procedure, Register, Variable, ZeroExtend, computeDomain, toShortString} -import specification.{ExternalFunction, SpecGlobal} +import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Endian, Expr, Extract, IntraProcIRCursor, MemoryAssign, MemoryLoad, Procedure, Register, Variable, ZeroExtend, computeDomain, toShortString} +import specification.{ExternalFunction, SpecGlobal, SymbolTableEntry} import scala.util.control.Breaks.{break, breakable} import java.math.BigInteger @@ -23,7 +23,7 @@ class Local( proc: Procedure, symResults: Map[CFGPosition, Map[SymbolicAccess, TwoElement]], 
constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - globals: Set[SpecGlobal], globalOffsets: Map[BigInt, BigInt], + globals: Set[SymbolTableEntry], globalOffsets: Map[BigInt, BigInt], externalFunctions: Set[ExternalFunction], reachingDefs: Map[CFGPosition, Map[Variable, Set[CFGPosition]]], writesTo: Map[Procedure, Set[Register]], diff --git a/src/main/scala/specification/Specification.scala b/src/main/scala/specification/Specification.scala index 6ae8d8cfe..e99dc5999 100644 --- a/src/main/scala/specification/Specification.scala +++ b/src/main/scala/specification/Specification.scala @@ -17,8 +17,16 @@ trait SpecGlobalOrAccess extends SpecVar { val size: Int } -case class SpecGlobal(name: String, override val size: Int, arraySize: Option[Int], address: BigInt) - extends SpecGlobalOrAccess { +trait SymbolTableEntry{ + val name: String + val size: Int + val address: BigInt +} + + +case class FuncEntry(override val name: String, override val size: Int, override val address: BigInt) extends SymbolTableEntry +case class SpecGlobal(override val name: String, override val size: Int, arraySize: Option[Int], override val address: BigInt) + extends SymbolTableEntry, SpecGlobalOrAccess { override def specGlobals: Set[SpecGlobalOrAccess] = Set(this) override val toAddrVar: BVar = BVariable("$" + s"${name}_addr", BitVecBType(64), Scope.Const) override val toOldVar: BVar = BVariable(s"${name}_old", BitVecBType(size), Scope.Local) @@ -153,6 +161,7 @@ case class ArrayAccess(global: SpecGlobal, index: Int) extends SpecGlobalOrAcces } case class Specification( + funcs: Set[FuncEntry], globals: Set[SpecGlobal], LPreds: Map[SpecGlobal, BExpr], relies: List[BExpr], diff --git a/src/main/scala/translating/ReadELFLoader.scala b/src/main/scala/translating/ReadELFLoader.scala index 60fac28bf..388819be0 100644 --- a/src/main/scala/translating/ReadELFLoader.scala +++ b/src/main/scala/translating/ReadELFLoader.scala @@ -7,15 +7,17 @@ import util.ILLoadingConfig import scala.jdk.CollectionConverters.* object ReadELFLoader { - def visitSyms(ctx: SymsContext, config: ILLoadingConfig): (Set[ExternalFunction], Set[SpecGlobal], Map[BigInt, BigInt], Int) = { + def visitSyms(ctx: SymsContext, config: ILLoadingConfig): (Set[ExternalFunction], Set[SpecGlobal], Set[FuncEntry], Map[BigInt, BigInt], Int) = { val externalFunctions = ctx.relocationTable.asScala.flatMap(r => visitRelocationTableExtFunc(r)).toSet val relocationOffsets = ctx.relocationTable.asScala.flatMap(r => visitRelocationTableOffsets(r)).toMap - val globalVariables = ctx.symbolTable.asScala.flatMap(s => visitSymbolTable(s)).toSet + val symbolTableEntries = ctx.symbolTable.asScala.flatMap(s => visitSymbolTable(s)).toSet + val globalVariables: Set[SpecGlobal] = symbolTableEntries.filter(_.isInstanceOf[SpecGlobal]).map(_.asInstanceOf[SpecGlobal]) + val functionEntries: Set[FuncEntry] = symbolTableEntries.filter(_.isInstanceOf[FuncEntry]).map(_.asInstanceOf[FuncEntry]) val mainAddress = ctx.symbolTable.asScala.flatMap(s => getFunctionAddress(s, config.mainProcedureName)) if (mainAddress.isEmpty) { throw Exception(s"no ${config.mainProcedureName} function in symbol table") } - (externalFunctions, globalVariables, relocationOffsets, mainAddress.head) + (externalFunctions, globalVariables, functionEntries, relocationOffsets, mainAddress.head) } def visitRelocationTableExtFunc(ctx: RelocationTableContext): Set[ExternalFunction] = { @@ -49,7 +51,7 @@ object ReadELFLoader { } } - def visitSymbolTable(ctx: SymbolTableContext): Set[SpecGlobal] = { + def 
visitSymbolTable(ctx: SymbolTableContext): Set[SymbolTableEntry] = { if (ctx.symbolTableHeader.tableName.STRING.getText == ".symtab") { val rows = ctx.symbolTableRow.asScala rows.flatMap(r => visitSymbolTableRow(r)).toSet @@ -75,11 +77,14 @@ object ReadELFLoader { } } - def visitSymbolTableRow(ctx: SymbolTableRowContext): Option[SpecGlobal] = { + def visitSymbolTableRow(ctx: SymbolTableRowContext): Option[SymbolTableEntry] = { if ((ctx.entrytype.getText == "OBJECT" || ctx.entrytype.getText == "FUNC") && ctx.bind.getText == "GLOBAL" && ctx.vis.getText == "DEFAULT") { val name = ctx.name.getText if (name.forall(allowedChars.contains)) { - Some(SpecGlobal(name, ctx.size.getText.toInt * 8, None, hexToBigInt(ctx.value.getText))) + ctx.entrytype.getText match + case "OBJECT" => Some(SpecGlobal(name, ctx.size.getText.toInt * 8, None, hexToBigInt(ctx.value.getText))) + case "FUNC" => Some(FuncEntry(name, ctx.size.getText.toInt * 8, hexToBigInt(ctx.value.getText))) + case _ => None } else { None } diff --git a/src/main/scala/translating/SpecificationLoader.scala b/src/main/scala/translating/SpecificationLoader.scala index 1d9b82336..3f5156ce4 100644 --- a/src/main/scala/translating/SpecificationLoader.scala +++ b/src/main/scala/translating/SpecificationLoader.scala @@ -43,7 +43,7 @@ case class SpecificationLoader(symbols: Set[SpecGlobal], program: Program) { case Some(_) => visitDirectFunctions(ctx.directFunctions) case None => Set() } - Specification(globals, lPreds, relies, guarantees, subroutines, directFunctions) + Specification(Set(), globals, lPreds, relies, guarantees, subroutines, directFunctions) } def visitDirectFunctions(ctx: DirectFunctionsContext): Set[FunctionOp] = { diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index b3cc60bab..9e0bf23ee 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -41,6 +41,7 @@ import scala.collection.mutable case class IRContext( externalFunctions: Set[ExternalFunction], globals: Set[SpecGlobal], + funcEntries: Set[FuncEntry], globalOffsets: Map[BigInt, BigInt], specification: Specification, program: Program // internally mutable @@ -77,13 +78,13 @@ object IRLoading { /** Create a context from just an IR program. */ def load(p: Program): IRContext = { - IRContext(Set.empty, Set.empty, Map.empty, IRLoading.loadSpecification(None, p, Set.empty), p) + IRContext(Set.empty, Set.empty, Set.empty, Map.empty, IRLoading.loadSpecification(None, p, Set.empty), p) } /** Load a program from files using the provided configuration. 
*/ def load(q: ILLoadingConfig): IRContext = { - val (externalFunctions, globals, globalOffsets, mainAddress) = IRLoading.loadReadELF(q.relfFile, q) + val (externalFunctions, globals, funcEntries, globalOffsets, mainAddress) = IRLoading.loadReadELF(q.relfFile, q) val program: Program = if (q.inputFile.endsWith(".adt")) { val bapProgram = loadBAP(q.inputFile) @@ -97,7 +98,7 @@ object IRLoading { val specification = IRLoading.loadSpecification(q.specFile, program, globals) - IRContext(externalFunctions, globals, globalOffsets, specification, program) + IRContext(externalFunctions, globals, funcEntries, globalOffsets, specification, program) } def loadBAP(fileName: String): BAPProgram = { @@ -155,7 +156,7 @@ object IRLoading { def loadReadELF( fileName: String, config: ILLoadingConfig - ): (Set[ExternalFunction], Set[SpecGlobal], Map[BigInt, BigInt], Int) = { + ): (Set[ExternalFunction], Set[SpecGlobal], Set[FuncEntry], Map[BigInt, BigInt], Int) = { val lexer = ReadELFLexer(CharStreams.fromFileName(fileName)) val tokens = CommonTokenStream(lexer) val parser = ReadELFParser(tokens) @@ -172,7 +173,7 @@ object IRLoading { specParser.setBuildParseTree(true) val specLoader = SpecificationLoader(globals, program) specLoader.visitSpecification(specParser.specification()) - case None => Specification(globals, Map(), List(), List(), List(), Set()) + case None => Specification(Set(), globals, Map(), List(), List(), List(), Set()) } } } @@ -989,7 +990,8 @@ object RunUtils { val paramResults = ParamAnalysis(ctx.program).analyze() Logger.info("[!] Running DSA Analysis") - val dsa = DSA(ctx.program, symResults, analysisResult.last.IRconstPropResult, ctx.globals, ctx.globalOffsets, ctx.externalFunctions, reachingDefs, writesTo, paramResults) + val symbolTableEntries: Set[SymbolTableEntry] = ctx.globals ++ ctx.funcEntries + val dsa = DSA(ctx.program, symResults, analysisResult.last.IRconstPropResult, symbolTableEntries, ctx.globalOffsets, ctx.externalFunctions, reachingDefs, writesTo, paramResults) dsa.analyze() Logger.info(s"[!] 
Finished indirect call resolution after $iteration iterations") diff --git a/src/test/scala/IndirectCallsTests.scala b/src/test/scala/IndirectCallsTests.scala index 60f7fdd67..12931793a 100644 --- a/src/test/scala/IndirectCallsTests.scala +++ b/src/test/scala/IndirectCallsTests.scala @@ -42,9 +42,10 @@ class IndirectCallsTests extends AnyFunSuite with OneInstancePerTest with Before def runAnalyses(program: Program, externalFunctions: Set[ExternalFunction] = Set.empty, globals: Set[SpecGlobal] = Set.empty, + funcEntries: Set[FuncEntry] = Set.empty, globalOffsets: Map[BigInt, BigInt] = Map.empty): StaticAnalysisContext = { - val ctx = IRContext(externalFunctions, globals, globalOffsets, Specification(Set(), Map(), List(), List(), List(), Set()), program) + val ctx = IRContext(externalFunctions, globals, funcEntries, globalOffsets, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program) StaticAnalysis.analyse(ctx, StaticAnalysisConfig(), 1) } diff --git a/src/test/scala/IrreducibleLoop.scala b/src/test/scala/IrreducibleLoop.scala index c7b0a4279..0bc06b7b6 100644 --- a/src/test/scala/IrreducibleLoop.scala +++ b/src/test/scala/IrreducibleLoop.scala @@ -27,7 +27,7 @@ class IrreducibleLoop extends AnyFunSuite { def load(conf: ILLoadingConfig) : Program = { val bapProgram = IRLoading.loadBAP(conf.inputFile) - val (externalFunctions, globals, globalOffsets, mainAddress) = IRLoading.loadReadELF(conf.relfFile, conf) + val (externalFunctions, globals, funcEntries, globalOffsets, mainAddress) = IRLoading.loadReadELF(conf.relfFile, conf) val IRTranslator = BAPToIR(bapProgram, mainAddress) val IRProgram = IRTranslator.translate IRProgram diff --git a/src/test/scala/LocalTest.scala b/src/test/scala/LocalTest.scala index 508bd635f..ad95f12b7 100644 --- a/src/test/scala/LocalTest.scala +++ b/src/test/scala/LocalTest.scala @@ -330,7 +330,7 @@ class LocalTest extends AnyFunSuite, TestUtil { val returnUnifier = ConvertToSingleProcedureReturn() program = returnUnifier.visitProgram(program) - val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Map.empty, Specification(Set(), Map(), List(), List(), List(), Set()), program)) + val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) val dsg: DSG = results.locals.get(program.mainProcedure) assert(dsg.formals(R1).equals(dsg.formals(R2))) assert(dsg.varToCell(locAssign1)(R6)._1.equals(dsg.varToCell(locAssign2)(R7)._1)) @@ -364,7 +364,7 @@ class LocalTest extends AnyFunSuite, TestUtil { val returnUnifier = ConvertToSingleProcedureReturn() program = returnUnifier.visitProgram(program) - val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Map.empty, Specification(Set(), Map(), List(), List(), List(), Set()), program)) + val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) val dsg: DSG = results.locals.get(program.mainProcedure) assert(dsg.varToCell(locAssign3)(R5)._1.offset == 13) } @@ -392,7 +392,7 @@ class LocalTest extends AnyFunSuite, TestUtil { val returnUnifier = ConvertToSingleProcedureReturn() program = returnUnifier.visitProgram(program) - val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), 
IRContext(Set.empty, Set.empty, Map.empty, Specification(Set(), Map(), List(), List(), List(), Set()), program)) + val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) val dsg: DSG = results.locals.get(program.mainProcedure) assert(dsg.formals(R1).equals(dsg.formals(R2))) assert(dsg.varToCell(locAssign1)(R6)._1.equals(dsg.varToCell(locAssign2)(R7)._1)) @@ -428,7 +428,7 @@ class LocalTest extends AnyFunSuite, TestUtil { val returnUnifier = ConvertToSingleProcedureReturn() program = returnUnifier.visitProgram(program) - val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Map.empty, Specification(Set(), Map(), List(), List(), List(), Set()), program)) + val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) val dsg: DSG = results.locals.get(program.mainProcedure) assert(dsg.varToCell(locAssign2)(R7).equals(dsg.varToCell(locAssign3)(R5))) } diff --git a/src/test/scala/PointsToTest.scala b/src/test/scala/PointsToTest.scala index 32131ed46..e170eed6b 100644 --- a/src/test/scala/PointsToTest.scala +++ b/src/test/scala/PointsToTest.scala @@ -40,9 +40,10 @@ class PointsToTest extends AnyFunSuite with OneInstancePerTest with BeforeAndAft def runAnalyses(program: Program, externalFunctions: Set[ExternalFunction] = Set.empty, globals: Set[SpecGlobal] = Set.empty, + funcEntries: Set[FuncEntry] = Set.empty, globalOffsets: Map[BigInt, BigInt] = Map.empty): StaticAnalysisContext = { - val ctx = IRContext(externalFunctions, globals, globalOffsets, Specification(Set(), Map(), List(), List(), List(), Set()), program) + val ctx = IRContext(externalFunctions, globals, funcEntries, globalOffsets, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program) StaticAnalysis.analyse(ctx, StaticAnalysisConfig(), 1) } diff --git a/src/test/scala/ir/InterpreterTests.scala b/src/test/scala/ir/InterpreterTests.scala index 47cf65e3c..df8c5851e 100644 --- a/src/test/scala/ir/InterpreterTests.scala +++ b/src/test/scala/ir/InterpreterTests.scala @@ -25,7 +25,7 @@ class InterpreterTests extends AnyFunSuite with BeforeAndAfter { ) val bapProgram = loadBAP(loading.inputFile) - val (externalFunctions, globals, _, mainAddress) = loadReadELF(loading.relfFile, loading) + val (externalFunctions, globals, funcEntries, _, mainAddress) = loadReadELF(loading.relfFile, loading) val IRTranslator = BAPToIR(bapProgram, mainAddress) var IRProgram = IRTranslator.translate IRProgram = ExternalRemover(externalFunctions.map(e => e.name)).visitProgram(IRProgram) From 3526de8e446f4257e731226afb3bf13c033eb871 Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 15 Jul 2024 09:39:06 +1000 Subject: [PATCH 017/104] added bv to signed integer --- src/main/scala/analysis/BitVectorEval.scala | 26 +++++++++++++++++++ src/main/scala/analysis/DSAUtility.scala | 26 ------------------- src/main/scala/analysis/Local.scala | 3 ++- .../analysis/SymbolicAccessAnalysis.scala | 5 ++-- 4 files changed, 31 insertions(+), 29 deletions(-) diff --git a/src/main/scala/analysis/BitVectorEval.scala b/src/main/scala/analysis/BitVectorEval.scala index 0b5847506..bad945270 100644 --- a/src/main/scala/analysis/BitVectorEval.scala +++ b/src/main/scala/analysis/BitVectorEval.scala @@ -24,6 +24,32 
@@ object BitVectorEval { */ def bv2nat(b: BitVecLiteral): BigInt = b.value + /** + * Converts a bitvector value to its corresponding signed integer + */ + def bv2int(b: BitVecLiteral): BigInt = + val binaryNum: Array[Int] = new Array[Int](b.size) + var i = 0 + var num = b.value + while (num > 0) { + binaryNum(i) = (num % BigInt(2)).intValue + num = num / 2 + i += 1 + } + + var result: BigInt = BigInt(0) + var counter: Int = 0 + binaryNum.foreach( + n => + if counter == binaryNum.length - 1 && n == 1 then + result = result - BigInt(2).pow(counter) + else if n == 1 then + result = result + BigInt(2).pow(counter) + counter += 1 + ) + result + + /** (bvadd (_ BitVec m) (_ BitVec m) (_ BitVec m)) * - addition modulo 2^m * diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index 69c59dabc..3f28c5f2a 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -769,32 +769,6 @@ def unwrapPaddingAndSlicing(expr: Expr): Expr = case ZeroExtend(extension, body) => unwrapPaddingAndSlicing(body) case _ => expr -def decToBinary(n: BigInt): Array[Int] = { - val binaryNum: Array[Int] = new Array[Int](64) - var i = 0 - var num = n - while (num > 0) { - binaryNum(i) = (num % BigInt(2)).intValue - num = num / 2 - i += 1 - } - binaryNum -} - -def twosComplementToDec(binary: Array[Int]): BigInt = { - var result: BigInt = BigInt(0) - var counter: Int = 0 - binary.foreach( - n => - if counter == binary.length - 1 && n == 1 then - result = result - BigInt(2).pow(counter) - else if n == 1 then - result = result + BigInt(2).pow(counter) - counter += 1 - ) - result -} - def adjust(cell: DSC, internalOffset: BigInt): DSC = val node = cell.node.get node.addCell(cell.offset+internalOffset, 0) diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala index 59e4a5edd..06eb381ba 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/Local.scala @@ -1,5 +1,6 @@ package analysis +import analysis.BitVectorEval.bv2int import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Endian, Expr, Extract, IntraProcIRCursor, MemoryAssign, MemoryLoad, Procedure, Register, Variable, ZeroExtend, computeDomain, toShortString} import specification.{ExternalFunction, SpecGlobal, SymbolTableEntry} @@ -230,7 +231,7 @@ class Local( if op.equals(BVADD) && arg1.equals(stackPointer) && arg2Offset.isDefined && arg2Offset.get.value >= BITVECNEGATIVE then - val size = twosComplementToDec(decToBinary(evaluateExpression(arg2, constProp(n)).get.value)) + val size = bv2int(arg2Offset.get) val node = DSN(Some(graph)) node.allocationRegions.add(StackLocation("Stack_"+proc.name, proc, -size)) node.flags.stack = true diff --git a/src/main/scala/analysis/SymbolicAccessAnalysis.scala b/src/main/scala/analysis/SymbolicAccessAnalysis.scala index a3ea99a59..85ceb4c75 100644 --- a/src/main/scala/analysis/SymbolicAccessAnalysis.scala +++ b/src/main/scala/analysis/SymbolicAccessAnalysis.scala @@ -1,8 +1,9 @@ package analysis +import analysis.BitVectorEval.bv2int import analysis.solvers.ForwardIDESolver import ir.IRWalk.procedure -import ir.{BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, GoTo, IndirectCall, Literal, Assign, Memory, MemoryLoad, Procedure, Program, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend} +import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, GoTo, IndirectCall, 
Literal, Memory, MemoryLoad, Procedure, Program, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend} import java.math.BigInteger @@ -102,7 +103,7 @@ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElem case Left(value) if value.accessor == variable => Map() case Left(value) => Map(d -> IdEdge()) case Right(_) => - val size = twosComplementToDec(decToBinary(v.value)) + val size = bv2int(v) Map(d -> IdEdge(), Left(SymbolicAccess(variable, StackLocation(s"Stack_${procedure(n).name}", procedure(n), -size), 0)) -> ConstEdge(TwoElementTop)) else d match From c806362d0f7e867968c39808c2bc559eb06543cd Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 29 Jul 2024 11:35:58 +1000 Subject: [PATCH 018/104] readability --- src/main/scala/analysis/BitVectorEval.scala | 26 +- src/main/scala/analysis/DSA.scala | 18 +- src/main/scala/analysis/DSAUtility.scala | 166 +++++++---- src/main/scala/analysis/Local.scala | 61 ++-- .../analysis/SymbolicAccessAnalysis.scala | 6 +- src/test/scala/LocalTest.scala | 260 +++++++++--------- 6 files changed, 297 insertions(+), 240 deletions(-) diff --git a/src/main/scala/analysis/BitVectorEval.scala b/src/main/scala/analysis/BitVectorEval.scala index bad945270..4083ee470 100644 --- a/src/main/scala/analysis/BitVectorEval.scala +++ b/src/main/scala/analysis/BitVectorEval.scala @@ -27,27 +27,11 @@ object BitVectorEval { /** * Converts a bitvector value to its corresponding signed integer */ - def bv2int(b: BitVecLiteral): BigInt = - val binaryNum: Array[Int] = new Array[Int](b.size) - var i = 0 - var num = b.value - while (num > 0) { - binaryNum(i) = (num % BigInt(2)).intValue - num = num / 2 - i += 1 - } - - var result: BigInt = BigInt(0) - var counter: Int = 0 - binaryNum.foreach( - n => - if counter == binaryNum.length - 1 && n == 1 then - result = result - BigInt(2).pow(counter) - else if n == 1 then - result = result + BigInt(2).pow(counter) - counter += 1 - ) - result + def bv2SignedInt(b: BitVecLiteral): BigInt = + if isNegative(b) then + b.value - BigInt(2).pow(b.size) + else + b.value /** (bvadd (_ BitVec m) (_ BitVec m) (_ BitVec m)) diff --git a/src/main/scala/analysis/DSA.scala b/src/main/scala/analysis/DSA.scala index dfcd983d0..91b645140 100644 --- a/src/main/scala/analysis/DSA.scala +++ b/src/main/scala/analysis/DSA.scala @@ -56,12 +56,14 @@ class DSA(program: Program, val stack : mutable.Stack[Procedure] = mutable.Stack() stack.pushAll(program.mainProcedure.calls) + // calculate the procedures used in the program while stack.nonEmpty do val current = stack.pop() domain += current stack.pushAll(current.calls.diff(domain)) + // perform local analysis on all procs domain.foreach( proc => val dsg = Local(proc, symResults, constProp, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params).analyze() @@ -80,13 +82,14 @@ class DSA(program: Program, queue.enqueueAll(CallGraph.pred(proc).diff(visited).intersect(domain)) ) + // bottom up phase while queue.nonEmpty do val proc = queue.dequeue() visited += proc queue.enqueueAll(CallGraph.pred(proc).diff(visited)) val buGraph = bu(proc) - // it should be fine - buGraph.callsites.foreach( // clone all the nodes first + + buGraph.callsites.foreach( callSite => val callee = callSite.proc val calleeGraph = locals(callee) //.cloneSelf() @@ -94,7 +97,7 @@ class DSA(program: Program, assert(calleeGraph.formals.keySet.diff(ignoreRegisters).equals(callSite.paramCells.keySet)) calleeGraph.globalMapping.foreach { - case (range: (BigInt, BigInt), (node: DSN, internal: 
BigInt)) => + case (range: AddressRange, Field(node, offset)) => node.cloneNode(calleeGraph, buGraph) } @@ -122,9 +125,9 @@ class DSA(program: Program, // assert(calleeGraph.formals.isEmpty || buGraph.varToCell(begin(callee)).equals(calleeGraph.formals)) val globalNodes: mutable.Map[Int, DSN] = mutable.Map() calleeGraph.globalMapping.foreach { - case (range: (BigInt, BigInt), (node: DSN, internal: BigInt)) => + case (range: AddressRange, Field(node: DSN, offset: BigInt)) => buGraph.mergeCells(buGraph.globalMapping(range)._1.getCell(buGraph.globalMapping(range)._2), - node.getCell(internal)) + node.getCell(offset)) } buGraph.varToCell.getOrElse(begin(callee), Map.empty).foreach{ @@ -155,6 +158,7 @@ class DSA(program: Program, visited = Set() + // top-down phase while queue.nonEmpty do val proc = queue.dequeue() visited += proc @@ -167,7 +171,7 @@ class DSA(program: Program, assert(callersGraph.globalMapping.keySet.equals(calleesGraph.globalMapping.keySet)) callersGraph.globalMapping.foreach { - case (range: (BigInt, BigInt), (node: DSN, internal: BigInt)) => + case (range: AddressRange, Field(node, offset)) => node.cloneNode(callersGraph, calleesGraph) } @@ -186,7 +190,7 @@ class DSA(program: Program, callersGraph.globalMapping.foreach { - case (range: (BigInt, BigInt), (node: DSN, internal: BigInt)) => + case (range: AddressRange, Field(node, internal)) => calleesGraph.mergeCells(calleesGraph.globalMapping(range)._1.getCell(calleesGraph.globalMapping(range)._2), node.getCell(internal)) } diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index 3f28c5f2a..90fd2e595 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -15,7 +15,6 @@ object NodeCounter { counter } - /** * Data Structure Graph for DSA * @param proc procedure of DSG @@ -37,40 +36,46 @@ class DSG(val proc: Procedure, val writesTo: Map[Procedure, Set[Register]], val params: Map[Procedure, Set[Variable]] ) { - // DSNodes owned by this graph + + // DSNodes owned by this graph, only updated once analysis is done, val nodes: mutable.Set[DSN] = mutable.Set() + + // this is mapping of point-relations in the graph val pointTo: mutable.Map[DSC, Slice] = mutable.Map() + + // represent callees in proc val callsites: mutable.Set[CallSite] = mutable.Set() val mallocRegister = Register("R0", 64) val stackPointer = Register("R31", 64) - // make stack nodes with + // this is the mapping from offsets/positions on the stack to their representative DS nodes val stackMapping: mutable.Map[BigInt, DSN] = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).foldLeft(Map[BigInt, DSN]()) { (results, pos) => stackBuilder(pos, results) }.to(collection.mutable.Map) + + /** + * this function takes a stackMapping and updates it based on a memory load and memory store + * @param pos memory load or store IL position + * @param index memory location of load or store + * @param size size of the load or store + * @param m stack mapping + * @return updated stack mapping + */ private def visitStackAccess(pos: CFGPosition, index: Expr, size: Int, m: Map[BigInt, DSN]) : Map[BigInt, DSN] = val byteSize = (size.toDouble / 8).ceil.toInt index match case BinaryExpr(op, arg1: Variable, arg2) if varToSym.contains(pos) && varToSym(pos).contains(arg1) && evaluateExpression(arg2, constProp(pos)).isDefined => var offset = evaluateExpression(arg2, constProp(pos)).get.value - varToSym(pos)(arg1).foldLeft(m) { + varToSym(pos)(arg1).foldLeft(m) { // go through all 
the symbolic accesses tied to arg1 at pos (m, sym) => sym match - case SymbolicAccess(accessor, StackLocation(regionIdentifier, proc, size), symOffset) => + case SymbolicAccess(accessor, StackLocation(regionIdentifier, proc, size), symOffset) => // only consider stack accesses offset = offset + symOffset - if m.contains(offset) then - assert(!m(offset).cells(0).growSize(byteSize)) - m - else - val node = DSN(Some(this), byteSize) - node.allocationRegions.add(StackLocation(pos.toShortString, proc, byteSize)) - node.flags.stack = true - node.addCell(0, byteSize) - m + (offset -> node) + createStackMapping(pos.toShortString, offset, m, byteSize) case _ => m } case arg: Variable if varToSym.contains(pos) && varToSym(pos).contains(arg) => @@ -78,18 +83,11 @@ class DSG(val proc: Procedure, (m, sym) => sym match case SymbolicAccess(accessor, StackLocation(regionIdentifier, proc, size), offset) => - if m.contains(offset) then - assert(!m(offset).cells(0).growSize(byteSize)) - m - else - val node = DSN(Some(this), byteSize) - node.allocationRegions.add(StackLocation(pos.toShortString, proc, byteSize)) - node.flags.stack = true - node.addCell(0, byteSize) - m + (offset -> node) + createStackMapping(pos.toShortString, offset, m, byteSize) case _ => m } case _ => m + private def stackBuilder(pos: CFGPosition, m: Map[BigInt, DSN]): Map[BigInt, DSN] = { pos match case Assign(variable: Variable, expr: Expr, _) => @@ -103,19 +101,33 @@ class DSG(val proc: Procedure, } + private def createStackMapping(label: String, offset: BigInt, m: Map[BigInt, DSN], byteSize: Int) : Map[BigInt, DSN]= + if m.contains(offset) then + assert(!m(offset).cells(0).growSize(byteSize)) + m + else + val node = DSN(Some(this), byteSize) + node.allocationRegions.add(StackLocation(label, proc, byteSize)) + node.flags.stack = true + node.addCell(0, byteSize) + m + (offset -> node) + - // make all globals private val swappedOffsets = globalOffsets.map(_.swap) - val globalMapping: mutable.Map[(BigInt, BigInt), (DSN, BigInt)] = mutable.Map[(BigInt, BigInt), (DSN, BigInt)]() + + // creates the globals from the symbol tables + val globalMapping: mutable.Map[AddressRange, Field] = mutable.Map[AddressRange, Field]() globals.foreach( global => val node = DSN(Some(this), global.size) node.allocationRegions.add(DataLocation(global.name, global.address, global.size/8)) node.flags.global = true node.flags.incomplete = true - globalMapping.update((global.address, global.address + global.size/8), (node, 0)) + globalMapping.update(AddressRange(global.address, global.address + global.size/8), Field(node, 0)) ) + // creates a global for each relocation entry in the symbol table + // the global corresponding to the relocated address points to the global corresponding to the original address globals.foreach( global => var address = global.address @@ -138,7 +150,7 @@ class DSG(val proc: Procedure, node.allocationRegions.add(DataLocation(s"Relocated_$relocatedAddress", relocatedAddress, 8)) node.flags.global = true node.flags.incomplete = true - globalMapping.update((relocatedAddress, relocatedAddress + 8), (node, 0)) + globalMapping.update(AddressRange(relocatedAddress, relocatedAddress + 8), Field(node, 0)) node pointTo.update(node.cells(field), Slice(isGlobal(address).get._2._1.cells(0), 0)) @@ -152,18 +164,18 @@ class DSG(val proc: Procedure, node.allocationRegions.add(DataLocation(external.name, external.offset, 0)) node.flags.global = true node.flags.incomplete = true - globalMapping.update((external.offset, external.offset), (node, 0)) + 
globalMapping.update(AddressRange(external.offset, external.offset), Field(node, 0)) ) // determine if an address is a global and return the corresponding global if it is. - def isGlobal(address: BigInt): Option[((BigInt, BigInt), (DSN, BigInt))] = - var global: Option[((BigInt, BigInt), (DSN, BigInt))] = None + def isGlobal(address: BigInt): Option[(AddressRange, Field)] = + var global: Option[(AddressRange, Field)] = None breakable { for (elem <- globalMapping) { - val range = elem._1 - if address >= range._1 && (address < range._2 || (range._1 == range._2 && range._2 == address)) then + val range = elem._1 // TODO + if address >= range.start && (address < range.end || (range.start == range.end && range.end == address)) then global = Some(elem) break } @@ -196,11 +208,9 @@ class DSG(val proc: Procedure, private def replaceInGlobals(oldCell: DSC, newCell: DSC) = if oldCell.node.isDefined then globalMapping.foreach { - case (key, tuple) => - val node = tuple._1 - val offset = tuple._2 + case (key, Field(node, offset)) => if node.equals(oldCell.node.get) then - globalMapping.update(key, (newCell.node.get, offset)) + globalMapping.update(key, Field(newCell.node.get, offset)) } private def replaceInStack(oldCell: DSC, newCell: DSC) = @@ -226,7 +236,10 @@ class DSG(val proc: Procedure, callSite.paramCells.update(variable, Slice(newCell, slice.internalOffset + internalOffsetChange)) } ) - + + + // replaces an old cell with a new cell in all the mappings and updates their slice offset if applicable + // This is inefficient looking to replace it with a union-find approach def replace(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = replaceInEV(oldCell, newCell, internalOffsetChange) replaceInPointTo(oldCell, newCell, internalOffsetChange) @@ -253,6 +266,9 @@ class DSG(val proc: Procedure, else Set(formals(arg)) + /** + * collects all the nodes that are currently in the DSG and updates nodes member variable + */ def collectNodes = nodes.clear() nodes.addAll(formals.values.map(_._1.node.get)) @@ -261,6 +277,10 @@ class DSG(val proc: Procedure, ) nodes.addAll(stackMapping.values) nodes.addAll(globalMapping.values.map(_._1)) + + /** + * Collapses the node causing it to lose field sensitivity + */ def collapseNode(node: DSN): Unit = val collapedCell = DSC(Option(node), 0) val e = DSC(None, 0) @@ -300,7 +320,11 @@ class DSG(val proc: Procedure, if cell.node.isDefined then pointTo.update(node.cells(0), Slice(cell, pointeeInternalOffset)) - def optionalCollapse(node: DSN): Unit = { + /** + * this function merges all the overlapping cells in the given node + * The node DOESN'T lose field sensitivity after this + */ + def selfCollapse(node: DSN): Unit = { var lastOffset: BigInt = -1 var lastAccess: BigInt = -1 val removed = mutable.Set[BigInt]() @@ -317,6 +341,9 @@ class DSG(val proc: Procedure, removed.foreach(node.cells.remove) } + /** + * merges two neighbouring cells into one + */ def mergeNeighbours(cell1: DSC, cell2: DSC): DSC = require(cell1.node.equals(cell2.node) && cell1.offset < cell2.offset) if pointTo.contains(cell2) then @@ -335,24 +362,30 @@ class DSG(val proc: Procedure, cell1 + /** + * merges two cells and unifies their nodes + * @param cell1 + * @param cell2 + * @return the resulting cell in the unified node + */ def mergeCells(cell1: DSC, cell2: DSC): DSC = - if cell1.equals(cell2) then + if cell1.equals(cell2) then // same cell no action required cell1 - else if cell1.node.isDefined && cell1.node.equals(cell2.node) then + else if cell1.node.isDefined && 
cell1.node.equals(cell2.node) then // same node different cells causes collapse collapseNode(cell1.node.get) cell1.node.get.cells(0) else if cell1.node.isEmpty then replace(cell1, cell2, 0) cell2 - else if cell1.node.get.collapsed || cell2.node.get.collapsed then + else if cell1.node.get.collapsed || cell2.node.get.collapsed then // a collapsed node val node1 = cell1.node.get val node2 = cell2.node.get - collapseNode(node1) + collapseNode(node1) // collapse the other node collapseNode(node2) - node2.allocationRegions.addAll(node1.allocationRegions) + node2.allocationRegions.addAll(node1.allocationRegions) // add regions and flags of node 1 to node 2 node2.flags.join(node1.flags) - if pointTo.contains(node1.cells(0)) then + if pointTo.contains(node1.cells(0)) then // merge the pointees of the two collapsed (single cell) nodes if pointTo.contains(node2.cells(0)) then val slice1 = getPointee(node1.cells(0)) val slice2 = getPointee(node2.cells(0)) @@ -363,8 +396,9 @@ class DSG(val proc: Procedure, pointTo.remove(node1.cells(0)) replace(node1.cells(0), node2.cells(0), 0) node2.cells(0) - else - + else // standard merge + + // node 1 is the cell with the higher offset var delta = cell1.offset - cell2.offset var node1 = cell1.node.get var node2 = cell2.node.get @@ -374,25 +408,32 @@ class DSG(val proc: Procedure, node2 = cell1.node.get + // create a seq of all cells from both nodes in order of their offsets in the resulting unified node val cells : Seq[(BigInt, DSC)] = (node1.cells.toSeq ++ node2.cells.foldLeft(Seq[(BigInt, DSC)]()){ (s, tuple) => val offset = tuple._1 val cell = tuple._2 - s:+ ((offset + delta, cell)) + s:+ ((offset + delta, cell)) // cells from nodes two are adjusted by the difference between cell1 and cell2 offsets }).sortBy(_._1) var lastOffset: BigInt = -1 var lastAccess: BigInt = -1 + // create a new node to represent the unified node val resultNode = DSN(Some(this)) + // add nodes flags and regions to the resulting node resultNode.allocationRegions.addAll(node1.allocationRegions ++ node2.allocationRegions) resultNode.flags.join(node1.flags) resultNode.flags.join(node2.flags) - if node2.flags.global then - globalMapping.foreach{ - case ((start: BigInt, end: BigInt), (node:DSN, offset: BigInt)) => + if node2.flags.global then // node 2 may have been adjusted depending on cell1 and cell2 offsets + globalMapping.foreach{ // update global mapping if node 2 was global + case (range: AddressRange, Field(node, offset))=> if node.equals(node2) then - globalMapping.update((start, end), (node, offset + delta)) + globalMapping.update(range, Field(node, offset + delta)) } + + // compute the cells present in the resulting unified node + // a mapping from offsets to the set of old cells which are merged to form a cell in the new unified node + // values in the mapping also include the largest access size so far computed for each resulting cell val resultCells: mutable.Map[BigInt, (Set[DSC], BigInt)] = mutable.Map() cells.foreach { case (offset: BigInt, cell: DSC) => @@ -447,11 +488,14 @@ class DSG(val proc: Procedure, resultNode.getCell(cell2.offset) + private def isFormal(pos: CFGPosition, variable: Variable): Boolean = !reachingDefs(pos).contains(variable) - + // formal arguments to this function val formals: mutable.Map[Variable, Slice] = mutable.Map() + + // mapping from each SSA variable (position, variable) to a slice val varToCell: mutable.Map[CFGPosition, mutable.Map[Variable, Slice]] = computeDomain(IntraProcIRCursor, 
Set(proc)).toSeq.sortBy(_.toShortString).foldLeft(mutable.Map[CFGPosition, mutable.Map[Variable, Slice]]()) { (m, pos) => pos match @@ -487,6 +531,7 @@ class DSG(val proc: Procedure, case _ => m } + def cloneSelf(): DSG = val newGraph = DSG(proc, constProp, varToSym, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params) assert(formals.size == newGraph.formals.size) @@ -527,12 +572,12 @@ class DSG(val proc: Procedure, } globalMapping.foreach { - case ((start: BigInt, end: BigInt), (node: DSN, internalOffset: BigInt)) => - assert(newGraph.globalMapping.contains((start, end))) + case (range: AddressRange, Field(node, offset)) => + assert(newGraph.globalMapping.contains(range)) if !idToNode.contains(node.id) then val newNode = node.cloneSelf(newGraph) idToNode.update(node.id, newNode) - newGraph.globalMapping.update((start, end), (idToNode(node.id), internalOffset)) + newGraph.globalMapping.update(range, Field(idToNode(node.id), offset)) } newGraph.pointTo.clear() @@ -601,6 +646,9 @@ class Flags() { foreign = other.foreign && foreign } +/** + * a Data structure Node + */ class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCounter.getCounter) { // var collapsed = false @@ -755,7 +803,13 @@ class CallSite(val call: DirectCall, val graph: DSG) { } } +// global address range +case class AddressRange(start: BigInt, end: BigInt) + +// a node, offset pair, difference to a cell is that it doesn't represent a DSG construct, +case class Field(node: DSN, offset: BigInt) +// unwraps internal padding and slicing and returns the expression def unwrapPaddingAndSlicing(expr: Expr): Expr = expr match case literal: Literal => literal @@ -778,8 +832,6 @@ def adjust(slice: Slice): DSC = val internal = slice.internalOffset adjust(cell, internal) -// minimum 2's complement 64 bit negative integer -val BITVECNEGATIVE: BigInt = new BigInt(new BigInteger("9223372036854775808")) diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala index 06eb381ba..cc904406c 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/Local.scala @@ -1,6 +1,6 @@ package analysis -import analysis.BitVectorEval.bv2int +import analysis.BitVectorEval.{bv2SignedInt, isNegative} import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Endian, Expr, Extract, IntraProcIRCursor, MemoryAssign, MemoryLoad, Procedure, Register, Variable, ZeroExtend, computeDomain, toShortString} import specification.{ExternalFunction, SpecGlobal, SymbolTableEntry} @@ -34,9 +34,11 @@ class Local( private val mallocRegister = Register("R0", 64) private val stackPointer = Register("R31", 64) + // set of cfg positions already processed by the analysis local phase private val visited: mutable.Set[CFGPosition] = mutable.Set() + // variables to symbolic access map for each cfg position val varToSym: Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]] = symResults.foldLeft(Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]]()) { (outerMap, syms) => val position = syms._1 @@ -54,8 +56,12 @@ class Local( } outerMap + (position -> innerMap) } - + + /** + * if an expr is the address of a stack location return its corresponding cell + * @param pos IL position where the expression is used + */ def isStack(expr: Expr, pos: CFGPosition): Option[DSC] = expr match case BinaryExpr(op, arg1: Variable, arg2) if varToSym.contains(pos) && varToSym(pos).contains(arg1) && @@ -88,16 +94,20 @@ class Local( val graph: DSG = DSG(proc, constProp, varToSym, globals, 
globalOffsets, externalFunctions, reachingDefs, writesTo, params) + /** + * if an expr is the address of a global location return its corresponding cell + * @param pos IL position where the expression is used + */ def isGlobal(expr: Expr, pos: CFGPosition, size: Int = 0): Option[DSC] = val value = evaluateExpression(expr, constProp(pos)) if value.isDefined then val global = graph.isGlobal(value.get.value) if global.isDefined then val address = value.get.value - val ((baseAddress: BigInt, end: BigInt), (node: DSN, internal: BigInt)) = global.get - val offset = address - baseAddress + val (range: AddressRange, Field(node, internal)) = global.get + val offset = address - range.start node.addCell(internal + offset, size) - graph.optionalCollapse(node) + graph.selfCollapse(node) if node.collapsed then Some(node.cells(0)) else @@ -155,16 +165,19 @@ class Local( val node = t._1.node.get val cell = node.getCell(offset + internalOffset) if graph.pointTo.contains(cell) && graph.pointTo(cell)._1.equals(result) then - graph.optionalCollapse(node) + graph.selfCollapse(node) assert(graph.pointTo.contains(node.getCell(offset))) result = graph.getPointee(node.getCell(offset))._1 else - graph.optionalCollapse(node) + graph.selfCollapse(node) ) val resultOffset = result.offset - graph.optionalCollapse(result.node.get) + graph.selfCollapse(result.node.get) result.node.get.getCell(result.offset) + /** + * handles unsupported pointer arithmetic by collapsing all the nodes invloved + */ def unsupportedPointerArithmeticOperation(n: CFGPosition, expr: Expr, lhsCell: DSC): Unit = { var containsPointer = false breakable { @@ -195,7 +208,7 @@ class Local( else visited.add(n) n match - case DirectCall(proc, target, label) if proc.name == "malloc" => + case DirectCall(proc, target, label) if proc.name == "malloc" => // R0 = Malloc() val size: BigInt = evaluateExpression(mallocRegister, constProp(n)) match case Some(value) => value.value case None => 0 @@ -203,7 +216,8 @@ class Local( node.allocationRegions.add(HeapLocation(nextMallocCount, proc, size)) node.flags.heap = true graph.mergeCells(graph.varToCell(n)(mallocRegister)._1, node.cells(0)) - case call: DirectCall if params.contains(call.target) => + case call: DirectCall if params.contains(call.target) => // Rx, Ry, ... 
Rn = FunctionCall() + // create call sites for the callees val cs = CallSite(call, graph) graph.callsites.add(cs) cs.paramCells.foreach{ @@ -220,32 +234,34 @@ class Local( val lhsCell = adjust(graph.varToCell(n)(variable)) var global = isGlobal(rhs, n) var stack = isStack(rhs, n) - if global.isDefined then + if global.isDefined then // Rx = global address graph.mergeCells(lhsCell, global.get) - else if stack.isDefined then // just in case stack can't be recognised in after this assignment + else if stack.isDefined then // Rx = stack address graph.mergeCells(lhsCell, stack.get) else expr match - case BinaryExpr(op, arg1: Variable, arg2) => + case BinaryExpr(op, arg1: Variable, arg2) => // Rx = Rx + c val arg2Offset = evaluateExpression(arg2, constProp(n)) - if op.equals(BVADD) && arg1.equals(stackPointer) - && arg2Offset.isDefined && arg2Offset.get.value >= BITVECNEGATIVE then - val size = bv2int(arg2Offset.get) - val node = DSN(Some(graph)) - node.allocationRegions.add(StackLocation("Stack_"+proc.name, proc, -size)) - node.flags.stack = true - graph.mergeCells(lhsCell, node.cells(0)) + && arg2Offset.isDefined && isNegative(arg2Offset.get) then + () // the stack is handled prior to this +// val size = bv2SignedInt(arg2Offset.get) +// val node = DSN(Some(graph)) +// node.allocationRegions.add(StackLocation("Stack_"+proc.name, proc, -size)) +// node.flags.stack = true +// graph.mergeCells(lhsCell, node.cells(0)) else if /*varToSym.contains(n) && varToSym(n).contains(arg1) && */ arg2Offset.isDefined then + // merge lhs with cell(s) corresponding to (arg1 + arg2) where arg1 is cell and arg2 is an offset val offset = evaluateExpression(arg2, constProp(n)).get.value visitPointerArithmeticOperation(n, lhsCell, arg1, 0, false, offset) - else + else // c can't be statically evaluated unsupportedPointerArithmeticOperation(n, expr, lhsCell) + // Rx = Ry merge corresponding cells to Rx and Ry case arg: Variable /*if varToSym.contains(n) && varToSym(n).contains(arg)*/ => visitPointerArithmeticOperation(n, lhsCell, arg, 0) - case MemoryLoad(mem, index, endian, size) => + case MemoryLoad(mem, index, endian, size) => // Rx = Mem[Ry], merge Rx and pointee of Ry (E(Ry)) val byteSize = (size.toDouble/8).ceil.toInt lhsCell.node.get.flags.read = true global = isGlobal(index, n, byteSize) @@ -273,6 +289,7 @@ class Local( unsupportedPointerArithmeticOperation(n, expr, lhsCell) case MemoryAssign(memory, ind: Expr, expr: Expr, endian: Endian, size: Int, label) if unwrapPaddingAndSlicing(expr).isInstanceOf[Variable] => // if value is a literal ignore it + // Mem[Ry] = Rx val value: Variable = unwrapPaddingAndSlicing(expr).asInstanceOf[Variable] val index: Expr = unwrapPaddingAndSlicing(ind) reachingDefs(n)(value).foreach(visit) diff --git a/src/main/scala/analysis/SymbolicAccessAnalysis.scala b/src/main/scala/analysis/SymbolicAccessAnalysis.scala index 85ceb4c75..0cd79400e 100644 --- a/src/main/scala/analysis/SymbolicAccessAnalysis.scala +++ b/src/main/scala/analysis/SymbolicAccessAnalysis.scala @@ -1,6 +1,6 @@ package analysis -import analysis.BitVectorEval.bv2int +import analysis.BitVectorEval.{bv2SignedInt, isNegative} import analysis.solvers.ForwardIDESolver import ir.IRWalk.procedure import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, GoTo, IndirectCall, Literal, Memory, MemoryLoad, Procedure, Program, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend} @@ -98,12 +98,12 @@ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, 
FlatElem case BinaryExpr(op, arg1: Variable, arg2) => evaluateExpression(arg2, constProp(n)) match case Some(v) => - if op.equals(BVADD) && arg1.equals(stackPointer) && v.value >= BITVECNEGATIVE then + if op.equals(BVADD) && arg1.equals(stackPointer) && isNegative(v) then d match case Left(value) if value.accessor == variable => Map() case Left(value) => Map(d -> IdEdge()) case Right(_) => - val size = bv2int(v) + val size = bv2SignedInt(v) Map(d -> IdEdge(), Left(SymbolicAccess(variable, StackLocation(s"Stack_${procedure(n).name}", procedure(n), -size), 0)) -> ConstEdge(TwoElementTop)) else d match diff --git a/src/test/scala/LocalTest.scala b/src/test/scala/LocalTest.scala index ad95f12b7..88dbb7e6a 100644 --- a/src/test/scala/LocalTest.scala +++ b/src/test/scala/LocalTest.scala @@ -1,4 +1,4 @@ -import analysis.{DSC, DSG, DSN, DataLocation, HeapLocation} +import analysis.{AddressRange, DSC, DSG, DSN, DataLocation, HeapLocation} import ir.Endian.BigEndian import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, ConvertToSingleProcedureReturn, DirectCall, Memory, MemoryAssign, MemoryLoad, SharedMemory} import org.scalatest.funsuite.AnyFunSuite @@ -38,7 +38,7 @@ class LocalTest extends AnyFunSuite, TestUtil { assert(stack24.node.get.collapsed) assert(dsg.pointTo(stack24)._1.equals(stack24)) - assert(dsg.pointTo(stack40).equals(dsg.getPointee(dsg.getPointee(dsg.globalMapping((69600, 69600))._1.cells(0))._1))) + assert(dsg.pointTo(stack40).equals(dsg.getPointee(dsg.getPointee(dsg.globalMapping(AddressRange(69600, 69600))._1.cells(0))._1))) } @@ -60,19 +60,19 @@ class LocalTest extends AnyFunSuite, TestUtil { val dsg = results.analysis.get.locals.get(program.procs("sub_seven")) assert(dsg.pointTo.size == 9) assert(dsg.stackMapping.isEmpty) - println(dsg.globalMapping((69648, 69652))._1.cells(0)) - assert(dsg.pointTo(dsg.globalMapping((69648, 69652))._1.cells(0))._1.node.get.collapsed) + println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) // initial global mappings - assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 
24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) } @@ -94,18 +94,18 @@ class LocalTest extends AnyFunSuite, TestUtil { val dsg = results.analysis.get.locals.get(program.procs("add_six")) assert(dsg.pointTo.size == 9) assert(dsg.stackMapping.isEmpty) - assert(dsg.pointTo(dsg.globalMapping((69648, 69652))._1.cells(0))._1.node.get.collapsed) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) // initial global mappings - assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + 
assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) } @@ -127,19 +127,19 @@ class LocalTest extends AnyFunSuite, TestUtil { val dsg = results.analysis.get.locals.get(program.procs("add_two")) assert(dsg.pointTo.size == 9) assert(dsg.stackMapping.isEmpty) - println(dsg.globalMapping((69648, 69652))._1.cells(0)) - assert(dsg.pointTo(dsg.globalMapping((69648, 69652))._1.cells(0))._1.node.get.collapsed) + println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) // initial global mappings - assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 
24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) } @@ -172,15 +172,15 @@ class LocalTest extends AnyFunSuite, TestUtil { assert(dsg.pointTo(stack28).equals(dsg.formals(R0))) // initial global mappings - assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) } @@ -205,7 +205,7 @@ class LocalTest extends AnyFunSuite, TestUtil { val dsg = results.analysis.get.locals.get(program.mainProcedure) // assert(dsg.pointTo.size == 7) // assert(dsg.stackMapping.isEmpty) -// assert(dsg.pointTo(dsg.globalMapping((69680, 69684))._1.cells(0))._1.node.get.collapsed) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69680, 69684))._1.cells(0))._1.node.get.collapsed) } @@ -452,19 +452,19 @@ class LocalTest extends AnyFunSuite, TestUtil { val dsg = results.analysis.get.bus.get(program.procs("sub_seven")) assert(dsg.pointTo.size == 9) 
assert(dsg.stackMapping.isEmpty) - println(dsg.globalMapping((69648, 69652))._1.cells(0)) - assert(dsg.pointTo(dsg.globalMapping((69648, 69652))._1.cells(0))._1.node.get.collapsed) + println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) // initial global mappings - assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) } @@ -487,19 +487,19 @@ class LocalTest extends AnyFunSuite, TestUtil { val dsg = results.analysis.get.bus.get(program.procs("add_six")) assert(dsg.pointTo.size == 9) assert(dsg.stackMapping.isEmpty) - println(dsg.globalMapping((69648, 69652))._1.cells(0)) - assert(dsg.pointTo(dsg.globalMapping((69648, 69652))._1.cells(0))._1.node.get.collapsed) + println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) // initial global mappings - assert(dsg.pointTo(dsg.globalMapping((69600, 
69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) } @@ -521,19 +521,19 @@ class LocalTest extends AnyFunSuite, TestUtil { val dsg = results.analysis.get.bus.get(program.procs("add_two")) assert(dsg.pointTo.size == 9) assert(dsg.stackMapping.isEmpty) - println(dsg.globalMapping((69648, 69652))._1.cells(0)) - assert(dsg.pointTo(dsg.globalMapping((69648, 69652))._1.cells(0))._1.node.get.collapsed) + println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) // initial global mappings - assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 
69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) } @@ -566,18 +566,18 @@ class LocalTest extends AnyFunSuite, TestUtil { assert(dsg.pointTo(stack28).equals(dsg.formals(R0))) // initial global mappings - assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 
69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) // bu - assert(dsg.pointTo(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))._1.node.get.collapsed) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))._1.node.get.collapsed) } @@ -674,18 +674,18 @@ class LocalTest extends AnyFunSuite, TestUtil { assert(dsg.pointTo(stack28).equals(dsg.formals(R0))) // initial global mappings - assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + 
assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) // bu - assert(dsg.pointTo(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))._1.node.get.collapsed) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))._1.node.get.collapsed) } test("top down jumptable2 sub_seven") { @@ -706,19 +706,19 @@ class LocalTest extends AnyFunSuite, TestUtil { val dsg = results.analysis.get.tds.get(program.procs("sub_seven")) assert(dsg.pointTo.size == 9) assert(dsg.stackMapping.isEmpty) - println(dsg.globalMapping((69648, 69652))._1.cells(0)) - assert(dsg.pointTo(dsg.globalMapping((69648, 69652))._1.cells(0))._1.node.get.collapsed) + println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) // initial global mappings - assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + 
assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) } @@ -741,19 +741,19 @@ class LocalTest extends AnyFunSuite, TestUtil { val dsg = results.analysis.get.tds.get(program.procs("add_six")) assert(dsg.pointTo.size == 9) assert(dsg.stackMapping.isEmpty) - println(dsg.globalMapping((69648, 69652))._1.cells(0)) - assert(dsg.pointTo(dsg.globalMapping((69648, 69652))._1.cells(0))._1.node.get.collapsed) + println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) // initial global mappings - assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 
24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) } @@ -775,19 +775,19 @@ class LocalTest extends AnyFunSuite, TestUtil { val dsg = results.analysis.get.tds.get(program.procs("add_two")) assert(dsg.pointTo.size == 9) assert(dsg.stackMapping.isEmpty) - println(dsg.globalMapping((69648, 69652))._1.cells(0)) - assert(dsg.pointTo(dsg.globalMapping((69648, 69652))._1.cells(0))._1.node.get.collapsed) + println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) // initial global mappings - assert(dsg.pointTo(dsg.globalMapping((69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping((2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping((1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping((69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping((2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping((1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping((2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping((2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping((69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping((69648, 69648 + 4))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) + assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) } From ce94199364a8e12437c3b09b1a428eb06ca02f23 Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 5 Aug 2024 11:29:20 +1000 Subject: [PATCH 019/104] added solver --- 
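Note: the solver added below is an offset-aware union-find — each parent link also records how far a term sits inside its representative, so two graph nodes can later be unified even when their cells only coincide at some byte offset. The following is a minimal standalone sketch of that idea only; the class name, type parameter, and the simplified unify signature are illustrative assumptions and are not the DSAUnionFindSolver API introduced by this patch, which works over the analysis's Term/Var hierarchy.

import scala.collection.mutable

// Illustrative offset-tracking union-find; a sketch, not the project's solver.
class OffsetUnionFind[T] {
  private val parent = mutable.Map[T, T]()
  private val offset = mutable.Map[T, BigInt]() // byte offset of a term inside its parent

  private def mkSet(t: T): Unit =
    if (!parent.contains(t)) { parent(t) = t; offset(t) = 0 }

  // Representative of t plus t's cumulative offset inside it, with path compression.
  def find(t: T): (T, BigInt) = {
    mkSet(t)
    if (parent(t) != t) {
      val (root, rootOffset) = find(parent(t))
      parent(t) = root
      offset(t) = offset(t) + rootOffset
    }
    (parent(t), offset(t))
  }

  // Unify the two classes so that t1 ends up `delta` bytes inside t2's representative.
  def unify(t1: T, t2: T, delta: BigInt): Unit = {
    val (r1, o1) = find(t1)
    val (r2, o2) = find(t2)
    if (r1 != r2) {
      parent(r1) = r2
      offset(r1) = o2 + delta - o1
    }
  }
}

// Example: after unify("a", "b", 8), find("a") returns ("b", 8),
// i.e. "a" is recorded as living 8 bytes into the node represented by "b".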
src/main/scala/analysis/DSAUtility.scala | 45 +++++++++++- src/main/scala/analysis/Local.scala | 9 +++ .../analysis/solvers/DSAUnionFindSolver.scala | 72 +++++++++++++++++++ 3 files changed, 125 insertions(+), 1 deletion(-) create mode 100644 src/main/scala/analysis/solvers/DSAUnionFindSolver.scala diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index 90fd2e595..d253e8b4d 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -1,5 +1,6 @@ package analysis +import analysis.solvers.{DSAUnionFindSolver, UnionFindSolver, Var} import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, IntraProcIRCursor, Literal, Memory, MemoryAssign, MemoryLoad, Procedure, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, begin, computeDomain, toShortString} import specification.{ExternalFunction, SpecGlobal, SymbolTableEntry} @@ -362,6 +363,26 @@ class DSG(val proc: Procedure, cell1 + // private val parent = mutable.Map[DSC, DSC]() + val solver: DSAUnionFindSolver[UniTerm] = DSAUnionFindSolver() + + +// val offsets = mutable.Map[DSN, BigInt]() + +// private def findOffset(current: DSN, result: DSN): BigInt = +// if current == result then +// 0 +// else +// current.offset + findOffset(current.embeddedIn.get, result) +// +// +// def resolve(cell: DSC): DSC = +// val node = cell.node.get +// val result: DSN = solver.find(Derm(node)).asInstanceOf[Derm].node +// val offset = findOffset(node, result) +// result.getCell(offset) + + /** * merges two cells and unifies their nodes * @param cell1 @@ -381,8 +402,11 @@ class DSG(val proc: Procedure, else if cell1.node.get.collapsed || cell2.node.get.collapsed then // a collapsed node val node1 = cell1.node.get val node2 = cell2.node.get + solver.unify(node1.term, node2.term, 0) collapseNode(node1) // collapse the other node collapseNode(node2) + node2.children.addAll(node1.children) + node2.children += (node1 -> 0) node2.allocationRegions.addAll(node1.allocationRegions) // add regions and flags of node 1 to node 2 node2.flags.join(node1.flags) if pointTo.contains(node1.cells(0)) then // merge the pointees of the two collapsed (single cell) nodes @@ -424,6 +448,10 @@ class DSG(val proc: Procedure, resultNode.allocationRegions.addAll(node1.allocationRegions ++ node2.allocationRegions) resultNode.flags.join(node1.flags) resultNode.flags.join(node2.flags) + resultNode.children.addAll(node1.children) + resultNode.children += (node1 -> 0) + resultNode.children.addAll(node2.children.map(f => (f._1, f._2 + delta))) + resultNode.children += (node2 -> delta) if node2.flags.global then // node 2 may have been adjusted depending on cell1 and cell2 offsets globalMapping.foreach{ // update global mapping if node 2 was global case (range: AddressRange, Field(node, offset))=> @@ -481,7 +509,9 @@ class DSG(val proc: Procedure, } pointTo.update(collapsedCell, Slice(result, internal)) } - + + solver.unify(node1.term, resultNode.term, 0) + solver.unify(node2.term, resultNode.term, delta) if cell1.offset >= cell2.offset then resultNode.getCell(cell1.offset) else @@ -651,6 +681,8 @@ class Flags() { */ class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCounter.getCounter) { + val term = Derm(this) + val children : mutable.Map[DSN, BigInt] = mutable.Map() // var collapsed = false var flags = Flags() def collapsed = flags.collapsed @@ -832,6 +864,17 @@ def adjust(slice: Slice): DSC = val internal = slice.internalOffset 
adjust(cell, internal) +/** Terms used in unification. + */ +sealed trait UniTerm + +/** A term variable in the solver + */ +case class Derm(node: DSN) extends UniTerm with Var[UniTerm] { + + override def toString: String = s"Term{${node}}" +} + diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala index cc904406c..a46b59f6b 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/Local.scala @@ -332,6 +332,15 @@ class Local( domain.foreach(visit) + val b = graph.solver.solution() graph.collectNodes + graph.nodes.foreach(node => + node.children.foreach( + child => + assert(graph.solver.find(child._1.term)._1.equals(node.term)) + assert(graph.solver.find(child._1.term)._2.equals(child._2)) + + ) + ) graph } diff --git a/src/main/scala/analysis/solvers/DSAUnionFindSolver.scala b/src/main/scala/analysis/solvers/DSAUnionFindSolver.scala new file mode 100644 index 000000000..62d6a5082 --- /dev/null +++ b/src/main/scala/analysis/solvers/DSAUnionFindSolver.scala @@ -0,0 +1,72 @@ +package analysis.solvers + +import scala.collection.mutable + +class DSAUnionFindSolver[A] { + + + val parent = mutable.Map[Term[A], Term[A]]() + val offsets = mutable.Map[Term[A], BigInt]() + + def unify(t1: Term[A], t2: Term[A], offset: BigInt): Unit = { + mkSet(t1) + mkSet(t2) + val rep1 = find(t1)._1 + val rep2 = find(t2)._1 + + if (rep1 == rep2) return + + (rep1, rep2) match { + case (v1: Var[A], v2: Var[A]) => + mkUnion(v1, v2, offset) + case (v1: Var[A], t2: Term[A]) => + mkUnion(v1, t2, offset) + case (t1: Term[A], v2: Var[A]) => + mkUnion(v2, t1, offset) + case (f1: Cons[A], f2: Cons[A]) if f1.doMatch(f2) => + mkUnion(f1, f2, offset) + f1.args.zip(f2.args).foreach { case (a1, a2) => + unify(a1, a2, offset) + } + case (x, y) => + throw new UnificationFailure(s"Cannot unify $t1 and $t2 (with representatives $x and $y)") + } + } + + def find(t: Term[A]): (Term[A], BigInt) = { + mkSet(t) + if (parent(t) != t) + val (par, offset) = find(parent(t)) + parent += t -> par + offsets += t -> offsets(t).+(offset) + + (parent(t), offsets(t)) + } + + /** Perform the union of the equivalence classes of `t1` and `t2`, such that `t2` becomes the new canonical element. + * We assume `t1` and `t2` to be distinct canonical elements. This implementation does not use + * [[https://en.wikipedia.org/wiki/Disjoint-set_data_structure union-by-rank]]. + */ + def mkUnion(t1: Term[A], t2: Term[A], offset: BigInt): Unit = + parent += t1 -> t2 + offsets += t1 -> offset + + /** Creates an equivalence class for the term `t`, if it does not exists already. + */ + def mkSet(t: Term[A]): Unit = + if (!parent.contains(t)) + parent += (t -> t) + offsets += (t -> 0) + + /** Returns the solution of the solver. Note that the terms in the solution have not yet been closed, i.e. they may + * contain constraint variables. 
+ * + * @return + * a map associating to each variable the representative of its equivalence class + */ + def solution(): Map[Var[A], Term[A]] = + // for each constraint variable, find its canonical representative (using the variable itself as default) + parent.keys.collect { case v: Var[A] => (v, find(v)._1) }.toMap.withDefault(v => v) + + +} From 17a35a266ce4e2949e7a02fca4b5e5859e2fc297 Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Thu, 8 Aug 2024 12:27:31 +1000 Subject: [PATCH 020/104] union-find merge --- src/main/scala/analysis/DSA.scala | 291 ++-- src/main/scala/analysis/DSAUtility.scala | 362 +++-- src/main/scala/analysis/Local.scala | 30 +- src/test/scala/LocalTest.scala | 1651 +++++++++++----------- 4 files changed, 1196 insertions(+), 1138 deletions(-) diff --git a/src/main/scala/analysis/DSA.scala b/src/main/scala/analysis/DSA.scala index 91b645140..c7569c2bf 100644 --- a/src/main/scala/analysis/DSA.scala +++ b/src/main/scala/analysis/DSA.scala @@ -69,153 +69,154 @@ class DSA(program: Program, val dsg = Local(proc, symResults, constProp, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params).analyze() locals.update(proc, dsg) - bu.update(proc, dsg.cloneSelf()) +// bu.update(proc, dsg.cloneSelf()) ) - val leafNodes = findLeaf(program.mainProcedure) - - leafNodes.foreach( - proc => - assert(locals(proc).callsites.isEmpty) - visited += proc - val preds : Set[Procedure] = CallGraph.pred(proc) - queue.enqueueAll(CallGraph.pred(proc).diff(visited).intersect(domain)) - ) - - // bottom up phase - while queue.nonEmpty do - val proc = queue.dequeue() - visited += proc - queue.enqueueAll(CallGraph.pred(proc).diff(visited)) - val buGraph = bu(proc) - - buGraph.callsites.foreach( - callSite => - val callee = callSite.proc - val calleeGraph = locals(callee) //.cloneSelf() - assert(buGraph.globalMapping.keySet.equals(calleeGraph.globalMapping.keySet)) - assert(calleeGraph.formals.keySet.diff(ignoreRegisters).equals(callSite.paramCells.keySet)) - - calleeGraph.globalMapping.foreach { - case (range: AddressRange, Field(node, offset)) => - node.cloneNode(calleeGraph, buGraph) - } - - calleeGraph.formals.foreach{ - case (variable: Variable, slice: Slice) if !ignoreRegisters.contains(variable) => - assert(callSite.paramCells.contains(variable)) - val node = slice.node - node.cloneNode(calleeGraph, buGraph) - case _ => - } - - assert(writesTo(callee).equals(callSite.returnCells.keySet)) - writesTo(callee).foreach( - reg => - assert(callSite.returnCells.contains(reg)) - val returnCells = calleeGraph.getCells(end(callee), reg) - assert(returnCells.nonEmpty) - returnCells.foreach{ - case slice: Slice => - val node = slice.node - node.cloneNode(calleeGraph, buGraph) - } - ) - -// assert(calleeGraph.formals.isEmpty || buGraph.varToCell(begin(callee)).equals(calleeGraph.formals)) - val globalNodes: mutable.Map[Int, DSN] = mutable.Map() - calleeGraph.globalMapping.foreach { - case (range: AddressRange, Field(node: DSN, offset: BigInt)) => - buGraph.mergeCells(buGraph.globalMapping(range)._1.getCell(buGraph.globalMapping(range)._2), - node.getCell(offset)) - } - - buGraph.varToCell.getOrElse(begin(callee), Map.empty).foreach{ - case (variable: Variable, formal) if !ignoreRegisters.contains(variable) => - buGraph.mergeCells(adjust(formal), adjust(callSite.paramCells(variable))) - case _ => - } - writesTo(callee).foreach( - reg => - val returnCells = buGraph.getCells(end(callee), reg) - // assert(returnCells.nonEmpty) - val result: DSC = 
returnCells.foldLeft(adjust(callSite.returnCells(reg))){ - // - case (c: DSC, ret) => - buGraph.mergeCells(c, adjust(ret)) - } - ) - ) - buGraph.collectNodes - // bottom up phase finished - // clone bu graphs to top-down graphs - domain.foreach( - proc => - td.update(proc, bu(proc).cloneSelf()) - ) - - queue.enqueue(program.mainProcedure) - visited = Set() - - - // top-down phase - while queue.nonEmpty do - val proc = queue.dequeue() - visited += proc - queue.enqueueAll(CallGraph.succ(proc).diff(visited)) - val callersGraph = td(proc) - callersGraph.callsites.foreach( - callSite => - val callee = callSite.proc - val calleesGraph = td(callee) - assert(callersGraph.globalMapping.keySet.equals(calleesGraph.globalMapping.keySet)) - - callersGraph.globalMapping.foreach { - case (range: AddressRange, Field(node, offset)) => - node.cloneNode(callersGraph, calleesGraph) - } - - - callSite.paramCells.foreach{ - case (variable: Variable, slice: Slice) => - val node = slice.node - node.cloneNode(callersGraph, calleesGraph) - } - - callSite.returnCells.foreach{ - case (variable: Variable, slice: Slice) => - val node = slice.node - node.cloneNode(callersGraph, callersGraph) - } - - - callersGraph.globalMapping.foreach { - case (range: AddressRange, Field(node, internal)) => - calleesGraph.mergeCells(calleesGraph.globalMapping(range)._1.getCell(calleesGraph.globalMapping(range)._2), - node.getCell(internal)) - } - - callSite.paramCells.keySet.foreach( - variable => - val paramCells = calleesGraph.getCells(callSite.call, variable) - paramCells.foldLeft(adjust(calleesGraph.formals(variable))) { - (cell, slice) => - calleesGraph.mergeCells(adjust(slice), cell) - } - ) - - calleesGraph.varToCell.getOrElse(callSite.call, Map.empty).foreach{ - case (variable: Variable, cell: Slice) => - val returnCells = calleesGraph.getCells(end(callee), variable) - returnCells.foldLeft(adjust(cell)){ - case (c: DSC, retCell: Slice) => - calleesGraph.mergeCells(c, adjust(retCell)) - } - case _ => ??? 
- } - ) - callersGraph.collectNodes - td.toMap + Map() +// val leafNodes = findLeaf(program.mainProcedure) +// +// leafNodes.foreach( +// proc => +// assert(locals(proc).callsites.isEmpty) +// visited += proc +// val preds : Set[Procedure] = CallGraph.pred(proc) +// queue.enqueueAll(CallGraph.pred(proc).diff(visited).intersect(domain)) +// ) +// +// // bottom up phase +// while queue.nonEmpty do +// val proc = queue.dequeue() +// visited += proc +// queue.enqueueAll(CallGraph.pred(proc).diff(visited)) +// val buGraph = bu(proc) +// +// buGraph.callsites.foreach( +// callSite => +// val callee = callSite.proc +// val calleeGraph = locals(callee) //.cloneSelf() +// assert(buGraph.globalMapping.keySet.equals(calleeGraph.globalMapping.keySet)) +// assert(calleeGraph.formals.keySet.diff(ignoreRegisters).equals(callSite.paramCells.keySet)) +// +// calleeGraph.globalMapping.foreach { +// case (range: AddressRange, Field(node, offset)) => +// node.cloneNode(calleeGraph, buGraph) +// } +// +// calleeGraph.formals.foreach{ +// case (variable: Variable, slice: Slice) if !ignoreRegisters.contains(variable) => +// assert(callSite.paramCells.contains(variable)) +// val node = slice.node +// node.cloneNode(calleeGraph, buGraph) +// case _ => +// } +// +// assert(writesTo(callee).equals(callSite.returnCells.keySet)) +// writesTo(callee).foreach( +// reg => +// assert(callSite.returnCells.contains(reg)) +// val returnCells = calleeGraph.getCells(end(callee), reg) +// assert(returnCells.nonEmpty) +// returnCells.foreach{ +// case slice: Slice => +// val node = slice.node +// node.cloneNode(calleeGraph, buGraph) +// } +// ) +// +//// assert(calleeGraph.formals.isEmpty || buGraph.varToCell(begin(callee)).equals(calleeGraph.formals)) +// val globalNodes: mutable.Map[Int, DSN] = mutable.Map() +// calleeGraph.globalMapping.foreach { +// case (range: AddressRange, Field(node: DSN, offset: BigInt)) => +// buGraph.mergeCells(buGraph.globalMapping(range)._1.getCell(buGraph.globalMapping(range)._2), +// node.getCell(offset)) +// } +// +// buGraph.varToCell.getOrElse(begin(callee), Map.empty).foreach{ +// case (variable: Variable, formal) if !ignoreRegisters.contains(variable) => +// buGraph.mergeCells(buGraph.adjust(formal), buGraph.adjust(callSite.paramCells(variable))) +// case _ => +// } +// writesTo(callee).foreach( +// reg => +// val returnCells = buGraph.getCells(end(callee), reg) +// // assert(returnCells.nonEmpty) +// val result: DSC = returnCells.foldLeft(buGraph.adjust(callSite.returnCells(reg))){ +// // +// case (c: DSC, ret) => +// buGraph.mergeCells(c, buGraph.adjust(ret)) +// } +// ) +// ) +// buGraph.collectNodes +// // bottom up phase finished +// // clone bu graphs to top-down graphs +// domain.foreach( +// proc => +// td.update(proc, bu(proc).cloneSelf()) +// ) +// +// queue.enqueue(program.mainProcedure) +// visited = Set() +// +// +// // top-down phase +// while queue.nonEmpty do +// val proc = queue.dequeue() +// visited += proc +// queue.enqueueAll(CallGraph.succ(proc).diff(visited)) +// val callersGraph = td(proc) +// callersGraph.callsites.foreach( +// callSite => +// val callee = callSite.proc +// val calleesGraph = td(callee) +// assert(callersGraph.globalMapping.keySet.equals(calleesGraph.globalMapping.keySet)) +// +// callersGraph.globalMapping.foreach { +// case (range: AddressRange, Field(node, offset)) => +// node.cloneNode(callersGraph, calleesGraph) +// } +// +// +// callSite.paramCells.foreach{ +// case (variable: Variable, slice: Slice) => +// val node = slice.node +// 
node.cloneNode(callersGraph, calleesGraph) +// } +// +// callSite.returnCells.foreach{ +// case (variable: Variable, slice: Slice) => +// val node = slice.node +// node.cloneNode(callersGraph, callersGraph) +// } +// +// +// callersGraph.globalMapping.foreach { +// case (range: AddressRange, Field(node, internal)) => +// calleesGraph.mergeCells(calleesGraph.globalMapping(range)._1.getCell(calleesGraph.globalMapping(range)._2), +// node.getCell(internal)) +// } +// +// callSite.paramCells.keySet.foreach( +// variable => +// val paramCells = calleesGraph.getCells(callSite.call, variable) +// paramCells.foldLeft(calleesGraph.adjust(calleesGraph.formals(variable))) { +// (cell, slice) => +// calleesGraph.mergeCells(calleesGraph.adjust(slice), cell) +// } +// ) +// +// calleesGraph.varToCell.getOrElse(callSite.call, Map.empty).foreach{ +// case (variable: Variable, cell: Slice) => +// val returnCells = calleesGraph.getCells(end(callee), variable) +// returnCells.foldLeft(calleesGraph.adjust(cell)){ +// case (c: DSC, retCell: Slice) => +// calleesGraph.mergeCells(c, calleesGraph.adjust(retCell)) +// } +// case _ => ??? +// } +// ) +// callersGraph.collectNodes +// td.toMap } } diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index d253e8b4d..8dbd4e124 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -42,7 +42,7 @@ class DSG(val proc: Procedure, val nodes: mutable.Set[DSN] = mutable.Set() // this is mapping of point-relations in the graph - val pointTo: mutable.Map[DSC, Slice] = mutable.Map() +// val pointTo: mutable.Map[DSC, Slice] = mutable.Map() // represent callees in proc val callsites: mutable.Set[CallSite] = mutable.Set() @@ -154,7 +154,7 @@ class DSG(val proc: Procedure, globalMapping.update(AddressRange(relocatedAddress, relocatedAddress + 8), Field(node, 0)) node - pointTo.update(node.cells(field), Slice(isGlobal(address).get._2._1.cells(0), 0)) + node.cells(field)._pointee = Some(Slice(isGlobal(address).get._2._1.cells(0), 0)) address = relocatedAddress } ) @@ -183,80 +183,80 @@ class DSG(val proc: Procedure, } global - private def replaceInEV(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = - varToCell.foreach( - (pos, m) => - m.foreach { - case (variable, slice) => - if slice.cell.equals(oldCell) then - m.update(variable, Slice(newCell, slice.internalOffset + internalOffsetChange)) - } - ) - - formals.foreach{ - case (variable, slice) => - if slice.cell.equals(oldCell) then - formals.update(variable, Slice(newCell, slice.internalOffset + internalOffsetChange)) - } - - private def replaceInPointTo(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = - pointTo.foreach { - case (pointer, slice: Slice) => - if slice.cell.equals(oldCell) then - pointTo.update(pointer, Slice(newCell, slice.internalOffset + internalOffsetChange)) - } - - private def replaceInGlobals(oldCell: DSC, newCell: DSC) = - if oldCell.node.isDefined then - globalMapping.foreach { - case (key, Field(node, offset)) => - if node.equals(oldCell.node.get) then - globalMapping.update(key, Field(newCell.node.get, offset)) - } - - private def replaceInStack(oldCell: DSC, newCell: DSC) = - if oldCell.node.isDefined then - stackMapping.foreach{ - case (offset, node) => - if node.equals(oldCell.node.get) then - stackMapping.update(offset, newCell.node.get) - } - - private def replaceInCallSites(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = - callsites.foreach( - callSite => - 
callSite.returnCells.foreach{ - case (variable: Variable, slice: Slice) => - if slice.cell.equals(oldCell) then - callSite.returnCells.update(variable, Slice(newCell, slice.internalOffset + internalOffsetChange)) - } - - callSite.paramCells.foreach{ - case (variable: Variable, slice: Slice) => - if slice.cell.equals(oldCell) then - callSite.paramCells.update(variable, Slice(newCell, slice.internalOffset + internalOffsetChange)) - } - ) +// private def replaceInEV(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = +// varToCell.foreach( +// (pos, m) => +// m.foreach { +// case (variable, slice) => +// if slice.cell.equals(oldCell) then +// m.update(variable, Slice(newCell, slice.internalOffset + internalOffsetChange)) +// } +// ) +// +// formals.foreach{ +// case (variable, slice) => +// if slice.cell.equals(oldCell) then +// formals.update(variable, Slice(newCell, slice.internalOffset + internalOffsetChange)) +// } +// +// private def replaceInPointTo(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = +// pointTo.foreach { +// case (pointer, slice: Slice) => +// if slice.cell.equals(oldCell) then +// pointTo.update(pointer, Slice(newCell, slice.internalOffset + internalOffsetChange)) +// } +// +// private def replaceInGlobals(oldCell: DSC, newCell: DSC) = +// if oldCell.node.isDefined then +// globalMapping.foreach { +// case (key, Field(node, offset)) => +// if node.equals(oldCell.node.get) then +// globalMapping.update(key, Field(newCell.node.get, offset)) +// } +// +// private def replaceInStack(oldCell: DSC, newCell: DSC) = +// if oldCell.node.isDefined then +// stackMapping.foreach{ +// case (offset, node) => +// if node.equals(oldCell.node.get) then +// stackMapping.update(offset, newCell.node.get) +// } +// +// private def replaceInCallSites(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = +// callsites.foreach( +// callSite => +// callSite.returnCells.foreach{ +// case (variable: Variable, slice: Slice) => +// if slice.cell.equals(oldCell) then +// callSite.returnCells.update(variable, Slice(newCell, slice.internalOffset + internalOffsetChange)) +// } +// +// callSite.paramCells.foreach{ +// case (variable: Variable, slice: Slice) => +// if slice.cell.equals(oldCell) then +// callSite.paramCells.update(variable, Slice(newCell, slice.internalOffset + internalOffsetChange)) +// } +// ) // replaces an old cell with a new cell in all the mappings and updates their slice offset if applicable // This is inefficient looking to replace it with a union-find approach - def replace(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = - replaceInEV(oldCell, newCell, internalOffsetChange) - replaceInPointTo(oldCell, newCell, internalOffsetChange) - replaceInGlobals(oldCell, newCell) - replaceInStack(oldCell, newCell) - replaceInCallSites(oldCell, newCell, internalOffsetChange) - - def getPointee(cell: DSC): Slice = - if !pointTo.contains(cell) then - val node = DSN(Some(this)) - pointTo.update(cell, Slice(node.cells(0), 0)) - pointTo(cell) - - def getPointeeAdjusted(cell:DSC): DSC = - val pointee = getPointee(cell) - adjust(pointee) +// def replace(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = +// replaceInEV(oldCell, newCell, internalOffsetChange) +// replaceInPointTo(oldCell, newCell, internalOffsetChange) +// replaceInGlobals(oldCell, newCell) +// replaceInStack(oldCell, newCell) +// replaceInCallSites(oldCell, newCell, internalOffsetChange) + +// def getPointee(cell: DSC): Slice = +// if !pointTo.contains(cell) then +// val node = 
DSN(Some(this)) +// pointTo.update(cell, Slice(node.cells(0), 0)) +// pointTo(cell) +// +// def getPointeeAdjusted(cell:DSC): DSC = +// val pointee = getPointee(cell) +// adjust(pointee) def getCells(pos: CFGPosition, arg: Variable): Set[Slice] = if reachingDefs(pos).contains(arg) then @@ -282,36 +282,40 @@ class DSG(val proc: Procedure, /** * Collapses the node causing it to lose field sensitivity */ - def collapseNode(node: DSN): Unit = - val collapedCell = DSC(Option(node), 0) - val e = DSC(None, 0) + def collapseNode(n: DSN): Unit = + val (term, offset) = solver.find(n.term) + val node = term.asInstanceOf[Derm].node + val collapedCell = DSC(Some(node), 0) + var pointeeInternalOffset: BigInt = 0 - val cell = node.cells.foldLeft(e) { + val cell = node.cells.tail.foldLeft(adjust(node.cells.head._2.getPointee)) { (c, field) => - - if pointTo.contains(field._2) && pointTo(field._2) == field._2 then - pointTo.update(field._2, Slice(collapedCell, 0)) + val cell = field._2 + if cell._pointee.isDefined && cell.getPointee.cell == cell then + cell._pointee = Some(Slice(collapedCell, 0)) +// collapedCell._pointee = Some(Slice(collapedCell, 0)) c - else if pointTo.contains(field._2) then - val slice = getPointee(field._2) + else if cell._pointee.isDefined then + val slice = cell.getPointee if slice.internalOffset > pointeeInternalOffset then pointeeInternalOffset = slice.internalOffset - mergeCells(c, getPointeeAdjusted(field._2)) + mergeCells(c, adjust(slice)) else c } - node.cells.values.foreach( - cell => - replace(cell, collapedCell, 0) - pointTo.foreach { - case (pointer, pointee) => - if pointer.equals(cell) then - pointTo.remove(pointer) - pointTo.update(collapedCell, pointee) - } - ) +// node.cells.values.foreach( +// cell => +//// replace(cell, collapedCell, 0) TODO check that this works by just ignoring the replace +// +// pointTo.foreach { +// case (pointer, pointee) => +// if pointer.equals(cell) then +// pointTo.remove(pointer) +// pointTo.update(collapedCell, pointee) +// } +// ) node.flags.collapsed = true @@ -319,7 +323,7 @@ class DSG(val proc: Procedure, node.cells.clear() node.cells.addOne(0, collapedCell) if cell.node.isDefined then - pointTo.update(node.cells(0), Slice(cell, pointeeInternalOffset)) + node.cells(0)._pointee = Some(Slice(cell, pointeeInternalOffset)) /** * this function merges all the overlapping cells in the given node @@ -347,18 +351,19 @@ class DSG(val proc: Procedure, */ def mergeNeighbours(cell1: DSC, cell2: DSC): DSC = require(cell1.node.equals(cell2.node) && cell1.offset < cell2.offset) - if pointTo.contains(cell2) then - if pointTo.contains(cell1) then - val slice1 = getPointee(cell1) - val slice2 = getPointee(cell2) - val result = mergeCells(getPointeeAdjusted(cell1), getPointeeAdjusted(cell2)) - assert(pointTo(cell1)._1.equals(result)) - pointTo.update(cell1, Slice(result, slice2.internalOffset.max(slice1.internalOffset))) + if cell2._pointee.isDefined then + if cell1._pointee.isDefined then + val slice1 = cell1.getPointee + val slice2 = cell2.getPointee + val result = mergeCells(adjust(slice1), adjust(slice2)) +// assert(pointTo(cell1)._1.equals(result)) + cell1._pointee = Some(Slice(result, slice2.internalOffset.max(slice1.internalOffset))) else - pointTo.update(cell1, getPointee(cell2)) - pointTo.remove(cell2) + cell1._pointee = cell2._pointee +// cell2._pointee = None val internalOffsetChange = cell2.offset - cell1.offset - replace(cell2, cell1, internalOffsetChange) + cell2.node.get.cells.remove(cell2.offset) +// replace(cell2, cell1, 
internalOffsetChange) cell1.growSize((cell2.offset - cell1.offset) + cell2.largestAccessedSize) // might cause another collapse cell1 @@ -366,6 +371,17 @@ class DSG(val proc: Procedure, // private val parent = mutable.Map[DSC, DSC]() val solver: DSAUnionFindSolver[UniTerm] = DSAUnionFindSolver() + def find(node: DSN) : Field = + val (n, offset) = solver.find(node.term) + val resultNode = n.asInstanceOf[Derm].node + Field(resultNode, offset) + + def find(cell: DSC) : DSC = + val node = cell.node.get + val offset = cell.offset + val parent: Field = find(node) + parent.node.addCell(cell.offset + parent.offset, cell.largestAccessedSize) + // val offsets = mutable.Map[DSN, BigInt]() @@ -389,7 +405,15 @@ class DSG(val proc: Procedure, * @param cell2 * @return the resulting cell in the unified node */ - def mergeCells(cell1: DSC, cell2: DSC): DSC = + def mergeCells(c1: DSC, c2: DSC): DSC = + + var cell1 = c1 + var cell2 = c2 + if c1.node.isDefined then + cell1 = find(c1) + + if c2.node.isDefined then + cell2 = find(c2) if cell1.equals(cell2) then // same cell no action required cell1 @@ -397,32 +421,38 @@ class DSG(val proc: Procedure, collapseNode(cell1.node.get) cell1.node.get.cells(0) else if cell1.node.isEmpty then - replace(cell1, cell2, 0) + ??? // not sure how to handle this yet TODO possibly take it out of the merge? +// replace(cell1, cell2, 0) cell2 else if cell1.node.get.collapsed || cell2.node.get.collapsed then // a collapsed node + val node1 = cell1.node.get val node2 = cell2.node.get - solver.unify(node1.term, node2.term, 0) + + assert(node1.collapsed || node2.collapsed) + collapseNode(node1) // collapse the other node collapseNode(node2) node2.children.addAll(node1.children) node2.children += (node1 -> 0) node2.allocationRegions.addAll(node1.allocationRegions) // add regions and flags of node 1 to node 2 node2.flags.join(node1.flags) - if pointTo.contains(node1.cells(0)) then // merge the pointees of the two collapsed (single cell) nodes - if pointTo.contains(node2.cells(0)) then - val slice1 = getPointee(node1.cells(0)) - val slice2 = getPointee(node2.cells(0)) - val result = mergeCells(getPointeeAdjusted(node1.cells(0)), getPointeeAdjusted(node2.cells(0))) - pointTo.update(node2.cells(0), Slice(result, slice1.internalOffset.max(slice2.internalOffset))) + if node1.cells(0)._pointee.isDefined then // merge the pointees of the two collapsed (single cell) nodes + if node2.cells(0)._pointee.isDefined then + val slice1 = node1.cells(0).getPointee + val slice2 = node2.cells(0).getPointee + val result = mergeCells(adjust(slice1), adjust(slice2)) + node2.cells(0)._pointee = Some(Slice(result, slice1.internalOffset.max(slice2.internalOffset))) else - pointTo.update(node2.cells(0), getPointee(node1.cells(0))) - pointTo.remove(node1.cells(0)) - replace(node1.cells(0), node2.cells(0), 0) + node2.cells(0)._pointee = node1.cells(0)._pointee +// node1.cells(0)._pointee = None +// replace(node1.cells(0), node2.cells(0), 0) + solver.unify(node1.term, node2.term, 0) node2.cells(0) else // standard merge // node 1 is the cell with the higher offset + var delta = cell1.offset - cell2.offset var node1 = cell1.node.get var node2 = cell2.node.get @@ -481,23 +511,23 @@ class DSG(val proc: Procedure, val outgoing: Set[Slice] = cells.foldLeft(Set[Slice]()){ (set, cell) => // replace incoming edges - if cell.node.get.equals(node2) then - replace(cell, collapsedCell, delta + cell.offset - offset) - else - assert(cell.node.get.equals(node1)) - replace(cell, collapsedCell, cell.offset - offset) +// if 
cell.node.get.equals(node2) then +// replace(cell, collapsedCell, delta + cell.offset - offset) +// else +// assert(cell.node.get.equals(node1)) +// replace(cell, collapsedCell, cell.offset - offset) // collect outgoing edges - if pointTo.contains(cell) then - val pointee = getPointee(cell) - pointTo.remove(cell) + if cell._pointee.isDefined then + val pointee = cell.getPointee +// cell._pointee = None set + pointee else set } // replace outgoing edges if outgoing.size == 1 then - pointTo.update(collapsedCell, outgoing.head) + collapsedCell._pointee = Some(outgoing.head) else if outgoing.size > 1 then var internal = outgoing.head._2 val result = outgoing.tail.foldLeft(outgoing.head._1){ @@ -507,7 +537,7 @@ class DSG(val proc: Procedure, internal = internal.max(pointeeInternal) mergeCells(result, cell) } - pointTo.update(collapsedCell, Slice(result, internal)) + collapsedCell._pointee = Some(Slice(result, internal)) } solver.unify(node1.term, resultNode.term, 0) @@ -518,6 +548,16 @@ class DSG(val proc: Procedure, resultNode.getCell(cell2.offset) + def adjust(cell: DSC, internalOffset: BigInt): DSC = + val link = solver.find(cell.node.get.term) + val node = link._1.asInstanceOf[Derm].node + val linkOffset = link._2 + node.addCell(cell.offset + internalOffset + linkOffset, 0) + + def adjust(slice: Slice, offset: BigInt = 0): DSC = + val cell = slice.cell + val internal = slice.internalOffset + adjust(cell, internal + offset) private def isFormal(pos: CFGPosition, variable: Variable): Boolean = !reachingDefs(pos).contains(variable) @@ -610,21 +650,21 @@ class DSG(val proc: Procedure, newGraph.globalMapping.update(range, Field(idToNode(node.id), offset)) } - newGraph.pointTo.clear() - pointTo.foreach { - case (cell1: DSC, slice: Slice) => - val node1 = cell1.node.get - val node2 = slice.node - if !idToNode.contains(node1.id) then - val newNode1 = node1.cloneSelf(newGraph) - idToNode.update(node1.id, newNode1) - - if !idToNode.contains(node2.id) then - val newNode2 = node2.cloneSelf(newGraph) - idToNode.update(node2.id, newNode2) - - newGraph.pointTo.update(idToNode(node1.id).cells(cell1.offset), Slice(idToNode(node2.id).cells(slice.offset), slice.internalOffset)) - } +// newGraph.pointTo.clear() +// pointTo.foreach { +// case (cell1: DSC, slice: Slice) => +// val node1 = cell1.node.get +// val node2 = slice.node +// if !idToNode.contains(node1.id) then +// val newNode1 = node1.cloneSelf(newGraph) +// idToNode.update(node1.id, newNode1) +// +// if !idToNode.contains(node2.id) then +// val newNode2 = node2.cloneSelf(newGraph) +// idToNode.update(node2.id, newNode2) +// +// newGraph.pointTo.update(idToNode(node1.id).cells(cell1.offset), Slice(idToNode(node2.id).cells(slice.offset), slice.internalOffset)) +// } callsites.foreach( callSite => @@ -704,7 +744,7 @@ class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCount var result: Option[DSC] = None cells.foreach { case (start: BigInt, cell: DSC) => - if start < offset && offset < (start + cell.largestAccessedSize) then + if start <= offset && offset < (start + cell.largestAccessedSize) then result = Some(cell) } result match @@ -768,13 +808,13 @@ class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCount ) } - cells.foreach { - case (offset: BigInt, cell: DSC) => - if from.pointTo.contains(cell) then - val pointee = from.getPointee(cell) - pointee._1.node.get.cloneNode(from, to) - to.pointTo.update(cell, pointee) - } +// cells.foreach { +// case (offset: BigInt, cell: DSC) => +// if 
from.pointTo.contains(cell) then +// val pointee = from.getPointee(cell) +// pointee._1.node.get.cloneNode(from, to) +// to.pointTo.update(cell, pointee) +// } override def equals(obj: Any): Boolean = obj match @@ -785,6 +825,8 @@ class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCount override def hashCode(): Int = id override def toString: String = s"Node($id, $allocationRegions ${if collapsed then ", collapsed" else ""})" + + } /** @@ -796,6 +838,23 @@ case class DSC(node: Option[DSN], offset: BigInt) { var largestAccessedSize: BigInt = 0 + var _pointee : Option[Slice] = None + + def getPointee : Slice = + if _pointee.isEmpty then + val node = DSN(Some(this.node.get.graph.get)) + _pointee = Some(Slice(node.cells(0), 0)) + else + val slice = _pointee.get + var node = slice.node + val graph = node.graph.get + val link = graph.solver.find(node.term) + node = link._1.asInstanceOf[Derm].node + val offset = link._2 + val cell = node.addCell(offset + slice.cell.offset, slice.cell.largestAccessedSize) + _pointee = Some(Slice(cell, slice.internalOffset)) + _pointee.get + def growSize(size: BigInt): Boolean = if size > largestAccessedSize then largestAccessedSize = size @@ -855,14 +914,7 @@ def unwrapPaddingAndSlicing(expr: Expr): Expr = case ZeroExtend(extension, body) => unwrapPaddingAndSlicing(body) case _ => expr -def adjust(cell: DSC, internalOffset: BigInt): DSC = - val node = cell.node.get - node.addCell(cell.offset+internalOffset, 0) -def adjust(slice: Slice): DSC = - val cell = slice.cell - val internal = slice.internalOffset - adjust(cell, internal) /** Terms used in unification. */ diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala index a46b59f6b..7dc1e2974 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/Local.scala @@ -147,14 +147,14 @@ class Local( field = 0 graph.mergeCells(c, if pointee then - graph.getPointeeAdjusted(node.getCell(field)) + graph.adjust(node.getCell(field).getPointee) else node.getCell(field) ) else val node = cell.node.get graph.collapseNode(node) - graph.mergeCells(c, if pointee then graph.getPointeeAdjusted(node.cells(0)) else node.cells(0)) + graph.mergeCells(c, if pointee then graph.adjust(node.cells(0).getPointee) else node.cells(0)) } if pointee then @@ -163,11 +163,11 @@ class Local( val offset = t._1.offset val internalOffset = t._2 val node = t._1.node.get - val cell = node.getCell(offset + internalOffset) - if graph.pointTo.contains(cell) && graph.pointTo(cell)._1.equals(result) then + val cell = graph.find(node.getCell(offset + internalOffset)) + if cell._pointee.isDefined && graph.find(cell.getPointee._1).equals(result) then graph.selfCollapse(node) - assert(graph.pointTo.contains(node.getCell(offset))) - result = graph.getPointee(node.getCell(offset))._1 +// assert(graph.pointTo.contains(node.getCell(offset))) TODO + result = graph.find(graph.find(node.getCell(offset)).getPointee._1) else graph.selfCollapse(node) ) @@ -222,16 +222,16 @@ class Local( graph.callsites.add(cs) cs.paramCells.foreach{ case (variable: Variable, slice: Slice) => - visitPointerArithmeticOperation(call, adjust(slice), variable, 0) + visitPointerArithmeticOperation(call, graph.adjust(slice), variable, 0) } cs.returnCells.foreach{ case (variable: Variable, slice: Slice) => val returnArgument = graph.varToCell(n)(variable) - graph.mergeCells(adjust(returnArgument), adjust(slice)) + graph.mergeCells(graph.adjust(returnArgument), graph.adjust(slice)) } case Assign(variable: Variable, rhs: 
Expr, maybeString) => val expr: Expr = unwrapPaddingAndSlicing(rhs) - val lhsCell = adjust(graph.varToCell(n)(variable)) + val lhsCell = graph.adjust(graph.varToCell(n)(variable)) var global = isGlobal(rhs, n) var stack = isStack(rhs, n) if global.isDefined then // Rx = global address @@ -267,9 +267,9 @@ class Local( global = isGlobal(index, n, byteSize) stack = isStack(index, n) if global.isDefined then - graph.mergeCells(lhsCell, graph.getPointeeAdjusted(global.get)) + graph.mergeCells(lhsCell,graph.adjust(graph.find(global.get).getPointee)) else if stack.isDefined then - graph.mergeCells(lhsCell, graph.getPointeeAdjusted(stack.get)) + graph.mergeCells(lhsCell, graph.adjust(graph.find(stack.get).getPointee)) else index match case BinaryExpr(op, arg1: Variable, arg2) => @@ -298,9 +298,9 @@ class Local( val stack = isStack(index, n) val addressPointee: DSC = if global.isDefined then - graph.getPointeeAdjusted(global.get) + graph.adjust(graph.find(global.get).getPointee) else if stack.isDefined then - graph.getPointeeAdjusted(stack.get) + graph.adjust(graph.find(stack.get).getPointee) else index match case BinaryExpr(op, arg1: Variable, arg2) => @@ -322,9 +322,11 @@ class Local( val valueCells = graph.getCells(n, value) val result = valueCells.foldLeft(addressPointee) { (c, slice) => - graph.mergeCells(adjust(slice), c) + graph.mergeCells(graph.adjust(slice), c) } + print("") + case _ => } def analyze(): DSG = diff --git a/src/test/scala/LocalTest.scala b/src/test/scala/LocalTest.scala index 88dbb7e6a..a9f9bcc21 100644 --- a/src/test/scala/LocalTest.scala +++ b/src/test/scala/LocalTest.scala @@ -1,4 +1,4 @@ -import analysis.{AddressRange, DSC, DSG, DSN, DataLocation, HeapLocation} +import analysis.{AddressRange, DSC, DSG, DSN, DataLocation, Derm, HeapLocation} import ir.Endian.BigEndian import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, ConvertToSingleProcedureReturn, DirectCall, Memory, MemoryAssign, MemoryLoad, SharedMemory} import org.scalatest.funsuite.AnyFunSuite @@ -26,831 +26,834 @@ class LocalTest extends AnyFunSuite, TestUtil { ) val program = results.ir.program val dsg = results.analysis.get.locals.get(program.mainProcedure) - assert(dsg.pointTo.size == 12) // 12 - val framePointer = dsg.stackMapping(0).cells(0) // R31 - assert(dsg.pointTo(framePointer)._1.equals(dsg.formals(R29)._1)) - val stack8 = dsg.stackMapping(8).cells(0) // R31 + 8 - assert(dsg.pointTo(stack8)._1.equals(dsg.formals(R30)._1)) - val stack40 = dsg.stackMapping(40).cells(0) // R31 + 40 - val stack32 = dsg.stackMapping(32).cells(0) // R31 + 32 - val stack24 = dsg.stackMapping(24).cells(0) // R31 + 24 and Malloc - assert(dsg.pointTo(stack32)._1.equals(stack24)) +// assert(dsg.pointTo.size == 12) // 12 + + val framePointer = dsg.find(dsg.adjust(dsg.find(dsg.stackMapping(0).cells(0)).getPointee)) + val R29formal = dsg.find(dsg.adjust(dsg.formals(R29))) + assert(framePointer.equals(R29formal)) + val stack8 = dsg.find(dsg.stackMapping(8).cells(0)) // R31 + 8 + assert(dsg.find(dsg.adjust(stack8.getPointee)).equals(dsg.find(dsg.adjust(dsg.formals(R30))))) + val stack40 = dsg.find(dsg.stackMapping(40).cells(0))// R31 + 40 + val stack32 = dsg.find(dsg.stackMapping(32).cells(0)) // R31 + 32 + val stack24 = dsg.find(dsg.stackMapping(24).cells(0)) // R31 + 24 and Malloc + assert(dsg.find(dsg.adjust(stack32.getPointee)).equals(stack24)) assert(stack24.node.get.collapsed) - assert(dsg.pointTo(stack24)._1.equals(stack24)) + assert(dsg.find(dsg.adjust(stack24.getPointee)).equals(stack24)) - 
assert(dsg.pointTo(stack40).equals(dsg.getPointee(dsg.getPointee(dsg.globalMapping(AddressRange(69600, 69600))._1.cells(0))._1))) +// assert((stack40.getPointee).equals(((dsg.globalMapping(AddressRange(69600, 69600))._1.cells(0).getPointee)._1.getPointee))) } - - test("local jumptable2 sub_seven") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - val program = results.ir.program - val dsg = results.analysis.get.locals.get(program.procs("sub_seven")) - assert(dsg.pointTo.size == 9) - assert(dsg.stackMapping.isEmpty) - println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) - - // initial global mappings - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) - - } - - test("local jumptable2 add_six") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - val program = results.ir.program - val dsg = results.analysis.get.locals.get(program.procs("add_six")) - assert(dsg.pointTo.size == 9) - assert(dsg.stackMapping.isEmpty) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) - - // initial global mappings - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 
69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) - - } - - test("local jumptable2 add_two") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - val program = results.ir.program - val dsg = results.analysis.get.locals.get(program.procs("add_two")) - assert(dsg.pointTo.size == 9) - assert(dsg.stackMapping.isEmpty) - println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) - - // initial global mappings - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) - - } - - test("local jumptable2 main") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = 
BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - - - val program = results.ir.program - val dsg = results.analysis.get.locals.get(program.mainProcedure) - assert(dsg.pointTo.size == 12) // 12 - val framePointer = dsg.stackMapping(0).cells(0) - val stack8 = dsg.stackMapping(8).cells(0) - val stack16 = dsg.stackMapping(16).cells(0) - val stack28 = dsg.stackMapping(28).cells(0) - assert(dsg.pointTo(framePointer).equals(dsg.formals(R29))) - assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) - assert(dsg.pointTo(stack16).equals(dsg.formals(R1))) - assert(dsg.pointTo(stack28).equals(dsg.formals(R0))) - - // initial global mappings - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) - - - } - - - - ignore("local jumptable2_clang main") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2_clang.adt", - relfFile = "examples/jumptable2/jumptable2_clang.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - val program = results.ir.program - val dsg = results.analysis.get.locals.get(program.mainProcedure) -// assert(dsg.pointTo.size == 7) -// assert(dsg.stackMapping.isEmpty) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69680, 69684))._1.cells(0))._1.node.get.collapsed) - } - - - - - ignore("interproc unsafe pointer arithmetic") { - // test interproc unification with points-to that have internal offsets into cells - } - - - test("unsafe pointer arithmetic") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.adt", - relfFile = "examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - val program = results.ir.program - val dsg = results.analysis.get.locals.get(program.mainProcedure) - val stack0 = dsg.stackMapping(0).cells(0) - val stack8 = 
dsg.stackMapping(8).cells(0) - val stack24 = dsg.stackMapping(24).cells(0) - val stack32 = dsg.stackMapping(32).cells(0) - val stack40 = dsg.stackMapping(40).cells(0) - val stack48 = dsg.stackMapping(48).cells(0) - val stack56 = dsg.stackMapping(56).cells(0) - assert(dsg.pointTo.size==10) - assert(dsg.pointTo(stack0).equals(dsg.formals(R29))) - assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) - assert(dsg.pointTo(stack24).equals(dsg.pointTo(stack32))) - assert(dsg.pointTo(stack24)._2 == 0) - assert(dsg.pointTo(stack24)._1.node.get.allocationRegions.size == 1) - assert(dsg.pointTo(stack24)._1.node.get.allocationRegions.head.asInstanceOf[HeapLocation].size == 20) - assert(dsg.pointTo(stack40)._1.node.get.allocationRegions.size == 1) - assert(dsg.pointTo(stack48)._1.node.get.allocationRegions.head.asInstanceOf[HeapLocation].size == 8) - assert(dsg.pointTo(dsg.pointTo(stack48)._1.node.get.cells(0)).equals(dsg.pointTo(stack40))) - assert(dsg.pointTo(dsg.pointTo(stack48)._1.node.get.cells(0)).equals(dsg.pointTo(stack56))) - assert(dsg.pointTo(stack24)._1.equals(dsg.pointTo(stack40)._1)) - assert(dsg.pointTo(stack40)._2 == 1) - } - - test("interproc pointer arithmetic main") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", - relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - val program = results.ir.program - val dsg = results.analysis.get.locals.get(program.mainProcedure) - val stack0 = dsg.stackMapping(0).cells(0) - val stack8 = dsg.stackMapping(8).cells(0) - val stack24 = dsg.stackMapping(24).cells(0) - val stack32 = dsg.stackMapping(32).cells(0) - val stack40 = dsg.stackMapping(40).cells(0) - assert(dsg.pointTo.size == 9) - assert(dsg.pointTo(stack0).equals(dsg.formals(R29))) - assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) - assert(dsg.pointTo(stack24)._1.node.get.equals(dsg.pointTo(stack32)._1.node.get)) - assert(dsg.pointTo(stack24)._1.offset == 0) - assert(dsg.pointTo(stack32)._1.offset == 16) - assert(dsg.pointTo.contains(dsg.pointTo(stack40)._1)) - assert(!dsg.pointTo(stack40)._1.node.get.equals(dsg.pointTo(stack24)._1.node.get)) - } - - test("interproc pointer arithmetic callee") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", - relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - val program = results.ir.program - val dsg = results.analysis.get.locals.get(program.procs("callee")) - val stack8 = dsg.stackMapping(8).cells(0) // R31 + 8 - val stack24 = dsg.stackMapping(24).cells(0) // R31 + 24 - assert(dsg.pointTo.size == 3) - assert(dsg.getPointee(stack8).equals(dsg.formals(R0))) - assert(dsg.getPointee(stack8)._1.offset == 0) - assert(dsg.getPointee(stack24)._1.equals(dsg.formals(R0)._1.node.get.cells(16))) - } - - - test("internal merge") { - val mem = SharedMemory("mem", 10000, 10000) - val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) - val locAssign2 = Assign(R7, 
BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) - var program = prog( - proc("main", - block("operations", -// Assign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), - locAssign1, - locAssign2, - MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), - MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), - ret - ) - ) - ) - - val returnUnifier = ConvertToSingleProcedureReturn() - program = returnUnifier.visitProgram(program) - - val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) - val dsg: DSG = results.locals.get(program.mainProcedure) - assert(dsg.formals(R1).equals(dsg.formals(R2))) - assert(dsg.varToCell(locAssign1)(R6)._1.equals(dsg.varToCell(locAssign2)(R7)._1)) - assert(dsg.varToCell(locAssign1)(R6)._2 == 0) - assert(dsg.varToCell(locAssign2)(R7)._2 == 1) - assert(dsg.pointTo.contains(dsg.varToCell(locAssign1)(R6)._1)) - assert(dsg.pointTo(dsg.varToCell(locAssign1)(R6)._1)._1.equals(dsg.formals(R1)._1)) - assert(dsg.pointTo.size == 1) - - } - - test("offsetting from middle of cell to a new cell") { - val mem = SharedMemory("mem", 10000, 10000) - val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) - val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) - val locAssign3 = Assign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(8, 64)), Some("00005")) - - var program = prog( - proc("main", - block("operations", - locAssign1, - locAssign2, - MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), - MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), - locAssign3, - ret - ) - ) - ) - - val returnUnifier = ConvertToSingleProcedureReturn() - program = returnUnifier.visitProgram(program) - - val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) - val dsg: DSG = results.locals.get(program.mainProcedure) - assert(dsg.varToCell(locAssign3)(R5)._1.offset == 13) - } - - test("offsetting from middle of cell to the same cell") { - val mem = SharedMemory("mem", 10000, 10000) - val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) - val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) - val locAssign3 = Assign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(7, 64)), Some("00005")) - - var program = prog( - proc("main", - block("operations", - // Assign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), - locAssign1, - locAssign2, - MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), - MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), - locAssign3, - ret - ) - ) - ) - - val returnUnifier = ConvertToSingleProcedureReturn() - program = returnUnifier.visitProgram(program) - - val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) - val dsg: DSG = results.locals.get(program.mainProcedure) - assert(dsg.formals(R1).equals(dsg.formals(R2))) - assert(dsg.varToCell(locAssign1)(R6)._1.equals(dsg.varToCell(locAssign2)(R7)._1)) - assert(dsg.varToCell(locAssign1)(R6)._1.equals(dsg.varToCell(locAssign3)(R5)._1)) - assert(dsg.varToCell(locAssign1)(R6)._2 == 0) - 
assert(dsg.varToCell(locAssign2)(R7)._2 == 1) - assert(dsg.varToCell(locAssign3)(R5)._2 == 8) - assert(dsg.pointTo.contains(dsg.varToCell(locAssign1)(R6)._1)) - assert(dsg.pointTo(dsg.varToCell(locAssign1)(R6)._1)._1.equals(dsg.formals(R1)._1)) - assert(dsg.pointTo.size == 1) - } - - test("internal offset transfer") { - val mem = SharedMemory("mem", 10000, 10000) - val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) - val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) - val locAssign3 = Assign(R5, R7, Some("00005")) - - var program = prog( - proc("main", - block("operations", - // Assign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), - locAssign1, - locAssign2, - MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), - MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), - locAssign3, - ret - ) - ) - ) - - val returnUnifier = ConvertToSingleProcedureReturn() - program = returnUnifier.visitProgram(program) - - val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) - val dsg: DSG = results.locals.get(program.mainProcedure) - assert(dsg.varToCell(locAssign2)(R7).equals(dsg.varToCell(locAssign3)(R5))) - } - - // bottom up tests - test("bottom up jumptable2 sub_seven") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - val program = results.ir.program - val dsg = results.analysis.get.bus.get(program.procs("sub_seven")) - assert(dsg.pointTo.size == 9) - assert(dsg.stackMapping.isEmpty) - println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) - - // initial global mappings - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 
4))._1.cells(0))) - - - } - - test("bottom up jumptable2 add_six") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - val program = results.ir.program - val dsg = results.analysis.get.bus.get(program.procs("add_six")) - assert(dsg.pointTo.size == 9) - assert(dsg.stackMapping.isEmpty) - println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) - - // initial global mappings - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) - - } - - test("bottomup jumptable2 add_two") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - val program = results.ir.program - val dsg = results.analysis.get.bus.get(program.procs("add_two")) - assert(dsg.pointTo.size == 9) - assert(dsg.stackMapping.isEmpty) - println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) - - // initial global mappings - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 
69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) - - } - - test("bottom up jumptable2 main") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - - - val program = results.ir.program - val dsg = results.analysis.get.bus.get(program.mainProcedure) - assert(dsg.pointTo.size == 13) // 13 - val framePointer = dsg.stackMapping(0).cells(0) - val stack8 = dsg.stackMapping(8).cells(0) - val stack16 = dsg.stackMapping(16).cells(0) - val stack28 = dsg.stackMapping(28).cells(0) - assert(dsg.pointTo(framePointer).equals(dsg.formals(R29))) - assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) - assert(dsg.pointTo(stack16).equals(dsg.formals(R1))) - assert(dsg.pointTo(stack28).equals(dsg.formals(R0))) - - // initial global mappings - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) - - // bu - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))._1.node.get.collapsed) - - } - - - - test("bottom up interproc pointer arithmetic callee") { - // same as interproc pointer arithmetic callee's local graph (no changes should have been made) - val 
results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", - relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - val program = results.ir.program - val dsg = results.analysis.get.bus.get(program.procs("callee")) - val stack8 = dsg.stackMapping(8).cells(0) // R31 + 8 - val stack24 = dsg.stackMapping(24).cells(0) // R31 + 24 - assert(dsg.pointTo.size == 3) - assert(dsg.getPointee(stack8).equals(dsg.formals(R0))) - assert(dsg.getPointee(stack8)._1.offset == 0) - assert(dsg.getPointee(stack24)._1.equals(dsg.formals(R0)._1.node.get.cells(16))) - } - - - test("bottom up interproc pointer arithmetic main") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", - relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - val program = results.ir.program - val dsg = results.analysis.get.bus.get(program.mainProcedure) - val stack0 = dsg.stackMapping(0).cells(0) - val stack8 = dsg.stackMapping(8).cells(0) - val stack24 = dsg.stackMapping(24).cells(0) - val stack32 = dsg.stackMapping(32).cells(0) - val stack40 = dsg.stackMapping(40).cells(0) - assert(dsg.pointTo.size == 9) - assert(dsg.pointTo(stack0).equals(dsg.formals(R29))) - assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) - assert(dsg.pointTo(stack24)._1.node.get.equals(dsg.pointTo(stack32)._1.node.get)) - assert(dsg.pointTo(stack24)._1.offset == 0) - assert(dsg.pointTo(stack32)._1.offset == 16) - assert(dsg.pointTo.contains(dsg.pointTo(stack40)._1)) - assert(dsg.pointTo(stack40)._1.node.get.equals(dsg.pointTo(stack24)._1.node.get)) - assert(dsg.pointTo(stack40)._1.offset == 32) - assert(dsg.pointTo(stack40)._2 == 0) - assert(dsg.pointTo(stack32)._2 == 0) - assert(dsg.pointTo(stack24)._2 == 0) - } - - - // top down tests - test("top down jumptable2 main") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - - val program = results.ir.program - val dsg = results.analysis.get.tds.get(program.mainProcedure) - assert(dsg.pointTo.size == 13) // 13 - val framePointer = dsg.stackMapping(0).cells(0) - val stack8 = dsg.stackMapping(8).cells(0) - val stack16 = dsg.stackMapping(16).cells(0) - val stack28 = dsg.stackMapping(28).cells(0) - assert(dsg.pointTo(framePointer).equals(dsg.formals(R29))) - assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) - assert(dsg.pointTo(stack16).equals(dsg.formals(R1))) - assert(dsg.pointTo(stack28).equals(dsg.formals(R0))) - - // initial global mappings - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 
24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) - - // bu - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))._1.node.get.collapsed) - } - - test("top down jumptable2 sub_seven") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - val program = results.ir.program - val dsg = results.analysis.get.tds.get(program.procs("sub_seven")) - assert(dsg.pointTo.size == 9) - assert(dsg.stackMapping.isEmpty) - println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) - - // initial global mappings - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) - - - } - - test("top down jumptable2 add_six") { - val results = 
RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - val program = results.ir.program - val dsg = results.analysis.get.tds.get(program.procs("add_six")) - assert(dsg.pointTo.size == 9) - assert(dsg.stackMapping.isEmpty) - println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) - - // initial global mappings - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) - - } - - test("top down jumptable2 add_two") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - val program = results.ir.program - val dsg = results.analysis.get.tds.get(program.procs("add_two")) - assert(dsg.pointTo.size == 9) - assert(dsg.stackMapping.isEmpty) - println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) - - // initial global mappings - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - 
assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) - assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) - - } - - test("top down interproc pointer arithmetic callee") { - // same as interproc pointer arithmetic callee's local graph (no changes should have been made) - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", - relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - val program = results.ir.program - val dsg = results.analysis.get.tds.get(program.procs("callee")) - val stack8 = dsg.stackMapping(8).cells(0) // R31 + 8 - val stack24 = dsg.stackMapping(24).cells(0) // R31 + 24 - assert(dsg.pointTo.size == 6) - assert(dsg.getPointee(stack8).equals(dsg.formals(R0))) - assert(dsg.getPointee(stack8)._1.offset == 16) - assert(dsg.getPointee(stack24)._1.equals(dsg.formals(R0)._1.node.get.cells(32))) - } - - - // top down phase should be the same as bu phase - test("top down interproc pointer arithmetic main") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", - relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - val program = results.ir.program - val dsg = results.analysis.get.tds.get(program.mainProcedure) - val stack0 = dsg.stackMapping(0).cells(0) - val stack8 = dsg.stackMapping(8).cells(0) - val stack24 = dsg.stackMapping(24).cells(0) - val stack32 = dsg.stackMapping(32).cells(0) - val stack40 = dsg.stackMapping(40).cells(0) - assert(dsg.pointTo.size == 9) - assert(dsg.pointTo(stack0).equals(dsg.formals(R29))) - assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) - assert(dsg.pointTo(stack24)._1.node.get.equals(dsg.pointTo(stack32)._1.node.get)) - assert(dsg.pointTo(stack24)._1.offset == 0) - assert(dsg.pointTo(stack32)._1.offset == 16) - assert(dsg.pointTo.contains(dsg.pointTo(stack40)._1)) - assert(dsg.pointTo(stack40)._1.node.get.equals(dsg.pointTo(stack24)._1.node.get)) - assert(dsg.pointTo(stack40)._1.offset == 32) - assert(dsg.pointTo(stack40)._2 == 0) - assert(dsg.pointTo(stack32)._2 == 0) - assert(dsg.pointTo(stack24)._2 == 0) - } - } +// +// test("local jumptable2 sub_seven") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/jumptable2/jumptable2.adt", +// relfFile = 
"examples/jumptable2/jumptable2.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.locals.get(program.procs("sub_seven")) +// assert(dsg.pointTo.size == 9) +// assert(dsg.stackMapping.isEmpty) +// println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) +// +// // initial global mappings +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) +// +// } +// +// test("local jumptable2 add_six") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/jumptable2/jumptable2.adt", +// relfFile = "examples/jumptable2/jumptable2.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.locals.get(program.procs("add_six")) +// assert(dsg.pointTo.size == 9) +// assert(dsg.stackMapping.isEmpty) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) +// +// // initial global mappings +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 
69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) +// +// } +// +// test("local jumptable2 add_two") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/jumptable2/jumptable2.adt", +// relfFile = "examples/jumptable2/jumptable2.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.locals.get(program.procs("add_two")) +// assert(dsg.pointTo.size == 9) +// assert(dsg.stackMapping.isEmpty) +// println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) +// +// // initial global mappings +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) +// +// } +// +// test("local jumptable2 main") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/jumptable2/jumptable2.adt", +// relfFile = "examples/jumptable2/jumptable2.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// +// +// val program = results.ir.program +// val dsg = results.analysis.get.locals.get(program.mainProcedure) +// 
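// (A note on the assertion pattern used throughout these DSA tests: the stack checks
//  assert that each saved-register slot's cell points to the corresponding formal cell,
//  while the "initial global mappings" checks assert that the cell for each global/data
//  address range, e.g. the table at 69656, points to the cell of what appears to be its
//  target procedure or data at the lower 19xx-22xx addresses.)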
assert(dsg.pointTo.size == 12) // 12 +// val framePointer = dsg.stackMapping(0).cells(0) +// val stack8 = dsg.stackMapping(8).cells(0) +// val stack16 = dsg.stackMapping(16).cells(0) +// val stack28 = dsg.stackMapping(28).cells(0) +// assert(dsg.pointTo(framePointer).equals(dsg.formals(R29))) +// assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) +// assert(dsg.pointTo(stack16).equals(dsg.formals(R1))) +// assert(dsg.pointTo(stack28).equals(dsg.formals(R0))) +// +// // initial global mappings +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) +// +// +// } +// +// +// +// ignore("local jumptable2_clang main") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/jumptable2/jumptable2_clang.adt", +// relfFile = "examples/jumptable2/jumptable2_clang.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.locals.get(program.mainProcedure) +//// assert(dsg.pointTo.size == 7) +//// assert(dsg.stackMapping.isEmpty) +//// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69680, 69684))._1.cells(0))._1.node.get.collapsed) +// } +// +// +// +// +// ignore("interproc unsafe pointer arithmetic") { +// // test interproc unification with points-to that have internal offsets into cells +// } +// +// +// test("unsafe pointer arithmetic") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.adt", +// relfFile = "examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.locals.get(program.mainProcedure) +// val stack0 = dsg.stackMapping(0).cells(0) +// val stack8 = dsg.stackMapping(8).cells(0) +// val 
stack24 = dsg.stackMapping(24).cells(0) +// val stack32 = dsg.stackMapping(32).cells(0) +// val stack40 = dsg.stackMapping(40).cells(0) +// val stack48 = dsg.stackMapping(48).cells(0) +// val stack56 = dsg.stackMapping(56).cells(0) +// assert(dsg.pointTo.size==10) +// assert(dsg.pointTo(stack0).equals(dsg.formals(R29))) +// assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) +// assert(dsg.pointTo(stack24).equals(dsg.pointTo(stack32))) +// assert(dsg.pointTo(stack24)._2 == 0) +// assert(dsg.pointTo(stack24)._1.node.get.allocationRegions.size == 1) +// assert(dsg.pointTo(stack24)._1.node.get.allocationRegions.head.asInstanceOf[HeapLocation].size == 20) +// assert(dsg.pointTo(stack40)._1.node.get.allocationRegions.size == 1) +// assert(dsg.pointTo(stack48)._1.node.get.allocationRegions.head.asInstanceOf[HeapLocation].size == 8) +// assert(dsg.pointTo(dsg.pointTo(stack48)._1.node.get.cells(0)).equals(dsg.pointTo(stack40))) +// assert(dsg.pointTo(dsg.pointTo(stack48)._1.node.get.cells(0)).equals(dsg.pointTo(stack56))) +// assert(dsg.pointTo(stack24)._1.equals(dsg.pointTo(stack40)._1)) +// assert(dsg.pointTo(stack40)._2 == 1) +// } +// +// test("interproc pointer arithmetic main") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", +// relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.locals.get(program.mainProcedure) +// val stack0 = dsg.stackMapping(0).cells(0) +// val stack8 = dsg.stackMapping(8).cells(0) +// val stack24 = dsg.stackMapping(24).cells(0) +// val stack32 = dsg.stackMapping(32).cells(0) +// val stack40 = dsg.stackMapping(40).cells(0) +// assert(dsg.pointTo.size == 9) +// assert(dsg.pointTo(stack0).equals(dsg.formals(R29))) +// assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) +// assert(dsg.pointTo(stack24)._1.node.get.equals(dsg.pointTo(stack32)._1.node.get)) +// assert(dsg.pointTo(stack24)._1.offset == 0) +// assert(dsg.pointTo(stack32)._1.offset == 16) +// assert(dsg.pointTo.contains(dsg.pointTo(stack40)._1)) +// assert(!dsg.pointTo(stack40)._1.node.get.equals(dsg.pointTo(stack24)._1.node.get)) +// } +// +// test("interproc pointer arithmetic callee") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", +// relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.locals.get(program.procs("callee")) +// val stack8 = dsg.stackMapping(8).cells(0) // R31 + 8 +// val stack24 = dsg.stackMapping(24).cells(0) // R31 + 24 +// assert(dsg.pointTo.size == 3) +// assert(dsg.getPointee(stack8).equals(dsg.formals(R0))) +// assert(dsg.getPointee(stack8)._1.offset == 0) +// assert(dsg.getPointee(stack24)._1.equals(dsg.formals(R0)._1.node.get.cells(16))) +// } +// +// +// test("internal merge") { +// val mem = SharedMemory("mem", 10000, 10000) +// 
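// (What this test appears to exercise: R6 and R7 are derived from R0 at offsets 4 and 5,
//  so the two 64-bit stores through them overlap within one node; unifying the overlapping
//  cells forces the stored values, i.e. the formal cells for R1 and R2, into a single cell,
//  which the assertions below check.)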
val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) +// val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) +// var program = prog( +// proc("main", +// block("operations", +//// Assign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), +// locAssign1, +// locAssign2, +// MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), +// MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), +// ret +// ) +// ) +// ) +// +// val returnUnifier = ConvertToSingleProcedureReturn() +// program = returnUnifier.visitProgram(program) +// +// val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) +// val dsg: DSG = results.locals.get(program.mainProcedure) +// assert(dsg.formals(R1).equals(dsg.formals(R2))) +// assert(dsg.varToCell(locAssign1)(R6)._1.equals(dsg.varToCell(locAssign2)(R7)._1)) +// assert(dsg.varToCell(locAssign1)(R6)._2 == 0) +// assert(dsg.varToCell(locAssign2)(R7)._2 == 1) +// assert(dsg.pointTo.contains(dsg.varToCell(locAssign1)(R6)._1)) +// assert(dsg.pointTo(dsg.varToCell(locAssign1)(R6)._1)._1.equals(dsg.formals(R1)._1)) +// assert(dsg.pointTo.size == 1) +// +// } +// +// test("offsetting from middle of cell to a new cell") { +// val mem = SharedMemory("mem", 10000, 10000) +// val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) +// val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) +// val locAssign3 = Assign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(8, 64)), Some("00005")) +// +// var program = prog( +// proc("main", +// block("operations", +// locAssign1, +// locAssign2, +// MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), +// MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), +// locAssign3, +// ret +// ) +// ) +// ) +// +// val returnUnifier = ConvertToSingleProcedureReturn() +// program = returnUnifier.visitProgram(program) +// +// val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) +// val dsg: DSG = results.locals.get(program.mainProcedure) +// assert(dsg.varToCell(locAssign3)(R5)._1.offset == 13) +// } +// +// test("offsetting from middle of cell to the same cell") { +// val mem = SharedMemory("mem", 10000, 10000) +// val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) +// val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) +// val locAssign3 = Assign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(7, 64)), Some("00005")) +// +// var program = prog( +// proc("main", +// block("operations", +// // Assign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), +// locAssign1, +// locAssign2, +// MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), +// MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), +// locAssign3, +// ret +// ) +// ) +// ) +// +// val returnUnifier = ConvertToSingleProcedureReturn() +// program = returnUnifier.visitProgram(program) +// +// val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) +// val dsg: DSG = results.locals.get(program.mainProcedure) +// 
assert(dsg.formals(R1).equals(dsg.formals(R2))) +// assert(dsg.varToCell(locAssign1)(R6)._1.equals(dsg.varToCell(locAssign2)(R7)._1)) +// assert(dsg.varToCell(locAssign1)(R6)._1.equals(dsg.varToCell(locAssign3)(R5)._1)) +// assert(dsg.varToCell(locAssign1)(R6)._2 == 0) +// assert(dsg.varToCell(locAssign2)(R7)._2 == 1) +// assert(dsg.varToCell(locAssign3)(R5)._2 == 8) +// assert(dsg.pointTo.contains(dsg.varToCell(locAssign1)(R6)._1)) +// assert(dsg.pointTo(dsg.varToCell(locAssign1)(R6)._1)._1.equals(dsg.formals(R1)._1)) +// assert(dsg.pointTo.size == 1) +// } +// +// test("internal offset transfer") { +// val mem = SharedMemory("mem", 10000, 10000) +// val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) +// val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) +// val locAssign3 = Assign(R5, R7, Some("00005")) +// +// var program = prog( +// proc("main", +// block("operations", +// // Assign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), +// locAssign1, +// locAssign2, +// MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), +// MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), +// locAssign3, +// ret +// ) +// ) +// ) +// +// val returnUnifier = ConvertToSingleProcedureReturn() +// program = returnUnifier.visitProgram(program) +// +// val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) +// val dsg: DSG = results.locals.get(program.mainProcedure) +// assert(dsg.varToCell(locAssign2)(R7).equals(dsg.varToCell(locAssign3)(R5))) +// } +// +// // bottom up tests +// test("bottom up jumptable2 sub_seven") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/jumptable2/jumptable2.adt", +// relfFile = "examples/jumptable2/jumptable2.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.bus.get(program.procs("sub_seven")) +// assert(dsg.pointTo.size == 9) +// assert(dsg.stackMapping.isEmpty) +// println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) +// +// // initial global mappings +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) +// 
assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) +// +// +// } +// +// test("bottom up jumptable2 add_six") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/jumptable2/jumptable2.adt", +// relfFile = "examples/jumptable2/jumptable2.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.bus.get(program.procs("add_six")) +// assert(dsg.pointTo.size == 9) +// assert(dsg.stackMapping.isEmpty) +// println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) +// +// // initial global mappings +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) +// +// } +// +// test("bottomup jumptable2 add_two") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/jumptable2/jumptable2.adt", +// relfFile = "examples/jumptable2/jumptable2.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.bus.get(program.procs("add_two")) +// assert(dsg.pointTo.size == 9) +// assert(dsg.stackMapping.isEmpty) +// println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) +// +// // initial global 
mappings +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) +// +// } +// +// test("bottom up jumptable2 main") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/jumptable2/jumptable2.adt", +// relfFile = "examples/jumptable2/jumptable2.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// +// +// val program = results.ir.program +// val dsg = results.analysis.get.bus.get(program.mainProcedure) +// assert(dsg.pointTo.size == 13) // 13 +// val framePointer = dsg.stackMapping(0).cells(0) +// val stack8 = dsg.stackMapping(8).cells(0) +// val stack16 = dsg.stackMapping(16).cells(0) +// val stack28 = dsg.stackMapping(28).cells(0) +// assert(dsg.pointTo(framePointer).equals(dsg.formals(R29))) +// assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) +// assert(dsg.pointTo(stack16).equals(dsg.formals(R1))) +// assert(dsg.pointTo(stack28).equals(dsg.formals(R0))) +// +// // initial global mappings +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 
69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) +// +// // bu +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))._1.node.get.collapsed) +// +// } +// +// +// +// test("bottom up interproc pointer arithmetic callee") { +// // same as interproc pointer arithmetic callee's local graph (no changes should have been made) +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", +// relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.bus.get(program.procs("callee")) +// val stack8 = dsg.stackMapping(8).cells(0) // R31 + 8 +// val stack24 = dsg.stackMapping(24).cells(0) // R31 + 24 +// assert(dsg.pointTo.size == 3) +// assert(dsg.getPointee(stack8).equals(dsg.formals(R0))) +// assert(dsg.getPointee(stack8)._1.offset == 0) +// assert(dsg.getPointee(stack24)._1.equals(dsg.formals(R0)._1.node.get.cells(16))) +// } +// +// +// test("bottom up interproc pointer arithmetic main") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", +// relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.bus.get(program.mainProcedure) +// val stack0 = dsg.stackMapping(0).cells(0) +// val stack8 = dsg.stackMapping(8).cells(0) +// val stack24 = dsg.stackMapping(24).cells(0) +// val stack32 = dsg.stackMapping(32).cells(0) +// val stack40 = dsg.stackMapping(40).cells(0) +// assert(dsg.pointTo.size == 9) +// assert(dsg.pointTo(stack0).equals(dsg.formals(R29))) +// assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) +// assert(dsg.pointTo(stack24)._1.node.get.equals(dsg.pointTo(stack32)._1.node.get)) +// assert(dsg.pointTo(stack24)._1.offset == 0) +// assert(dsg.pointTo(stack32)._1.offset == 16) +// assert(dsg.pointTo.contains(dsg.pointTo(stack40)._1)) +// assert(dsg.pointTo(stack40)._1.node.get.equals(dsg.pointTo(stack24)._1.node.get)) +// assert(dsg.pointTo(stack40)._1.offset == 32) +// assert(dsg.pointTo(stack40)._2 == 0) +// assert(dsg.pointTo(stack32)._2 == 0) +// assert(dsg.pointTo(stack24)._2 == 0) +// } +// +// +// // top down tests +// test("top down jumptable2 main") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/jumptable2/jumptable2.adt", +// relfFile = "examples/jumptable2/jumptable2.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = 
BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// +// val program = results.ir.program +// val dsg = results.analysis.get.tds.get(program.mainProcedure) +// assert(dsg.pointTo.size == 13) // 13 +// val framePointer = dsg.stackMapping(0).cells(0) +// val stack8 = dsg.stackMapping(8).cells(0) +// val stack16 = dsg.stackMapping(16).cells(0) +// val stack28 = dsg.stackMapping(28).cells(0) +// assert(dsg.pointTo(framePointer).equals(dsg.formals(R29))) +// assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) +// assert(dsg.pointTo(stack16).equals(dsg.formals(R1))) +// assert(dsg.pointTo(stack28).equals(dsg.formals(R0))) +// +// // initial global mappings +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) +// +// // bu +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))._1.node.get.collapsed) +// } +// +// test("top down jumptable2 sub_seven") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/jumptable2/jumptable2.adt", +// relfFile = "examples/jumptable2/jumptable2.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.tds.get(program.procs("sub_seven")) +// assert(dsg.pointTo.size == 9) +// assert(dsg.stackMapping.isEmpty) +// println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) +// +// // initial global mappings +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) +// 
assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) +// +// +// } +// +// test("top down jumptable2 add_six") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/jumptable2/jumptable2.adt", +// relfFile = "examples/jumptable2/jumptable2.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.tds.get(program.procs("add_six")) +// assert(dsg.pointTo.size == 9) +// assert(dsg.stackMapping.isEmpty) +// println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) +// +// // initial global mappings +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) +// +// } +// +// test("top down jumptable2 add_two") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/jumptable2/jumptable2.adt", +// relfFile = "examples/jumptable2/jumptable2.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// 
boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.tds.get(program.procs("add_two")) +// assert(dsg.pointTo.size == 9) +// assert(dsg.stackMapping.isEmpty) +// println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) +// +// // initial global mappings +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) +// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) +// +// } +// +// test("top down interproc pointer arithmetic callee") { +// // same as interproc pointer arithmetic callee's local graph (no changes should have been made) +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", +// relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = "boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.tds.get(program.procs("callee")) +// val stack8 = dsg.stackMapping(8).cells(0) // R31 + 8 +// val stack24 = dsg.stackMapping(24).cells(0) // R31 + 24 +// assert(dsg.pointTo.size == 6) +// assert(dsg.getPointee(stack8).equals(dsg.formals(R0))) +// assert(dsg.getPointee(stack8)._1.offset == 16) +// assert(dsg.getPointee(stack24)._1.equals(dsg.formals(R0)._1.node.get.cells(32))) +// } +// +// +// // top down phase should be the same as bu phase +// test("top down interproc pointer arithmetic main") { +// val results = RunUtils.loadAndTranslate( +// BASILConfig( +// loading = ILLoadingConfig( +// inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", +// relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", +// specFile = None, +// dumpIL = None, +// ), +// staticAnalysis = Some(StaticAnalysisConfig()), +// boogieTranslation = BoogieGeneratorConfig(), +// outputPrefix = 
"boogie_out", +// ) +// ) +// val program = results.ir.program +// val dsg = results.analysis.get.tds.get(program.mainProcedure) +// val stack0 = dsg.stackMapping(0).cells(0) +// val stack8 = dsg.stackMapping(8).cells(0) +// val stack24 = dsg.stackMapping(24).cells(0) +// val stack32 = dsg.stackMapping(32).cells(0) +// val stack40 = dsg.stackMapping(40).cells(0) +// assert(dsg.pointTo.size == 9) +// assert(dsg.pointTo(stack0).equals(dsg.formals(R29))) +// assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) +// assert(dsg.pointTo(stack24)._1.node.get.equals(dsg.pointTo(stack32)._1.node.get)) +// assert(dsg.pointTo(stack24)._1.offset == 0) +// assert(dsg.pointTo(stack32)._1.offset == 16) +// assert(dsg.pointTo.contains(dsg.pointTo(stack40)._1)) +// assert(dsg.pointTo(stack40)._1.node.get.equals(dsg.pointTo(stack24)._1.node.get)) +// assert(dsg.pointTo(stack40)._1.offset == 32) +// assert(dsg.pointTo(stack40)._2 == 0) +// assert(dsg.pointTo(stack32)._2 == 0) +// assert(dsg.pointTo(stack24)._2 == 0) +// } +// +//} From 55f9bd85ae30acc164d6781e8d2b1daeda11de94 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Tue, 13 Aug 2024 15:03:25 +1000 Subject: [PATCH 021/104] Adding regions into IR --- src/main/scala/analysis/MemoryModelMap.scala | 176 +++++++++++++++++-- src/main/scala/analysis/RegionInjector.scala | 27 ++- 2 files changed, 189 insertions(+), 14 deletions(-) diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index fc719d945..eb14b983c 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -30,6 +30,8 @@ class MemoryModelMap { private val heapMap: mutable.Map[RangeKey, HeapRegion] = mutable.TreeMap() private val dataMap: mutable.Map[RangeKey, DataRegion] = mutable.TreeMap() + private val uf = new UnionFind() + /** Add a range and object to the mapping * * @param offset the offset of the range @@ -66,6 +68,15 @@ class MemoryModelMap { currentDataMap.addOne(updatedRange -> currentMaxRegion) currentDataMap(RangeKey(offset, MAX_BIGINT)) = d } + case h: HeapRegion => + val currentHeapMap = heapMap + if (currentHeapMap.isEmpty) { + currentHeapMap(RangeKey(offset, offset + h.size.value - 1)) = h + } else { + val currentMaxRange = currentHeapMap.keys.maxBy(_.end) + val currentMaxRegion = currentHeapMap(currentMaxRange) + currentHeapMap(RangeKey(currentMaxRange.start + 1, h.size.value - 1)) = h + } } } @@ -164,6 +175,22 @@ class MemoryModelMap { for (dataRgn <- allDataRgns) { add(dataRgn.start.value, dataRgn) } + + // add heap regions + val rangeStart = 0 + for ((position, regions) <- memoryRegions) { + regions match { + case Lift(node) => + for (region <- node) { + region match { + case heapRegion: HeapRegion => + add(BigInt(0), heapRegion) + case _ => + } + } + case LiftedBottom => + } + } } // TODO: push and pop could be optimised by caching the results def pushContext(funName: String): Unit = { @@ -247,38 +274,93 @@ class MemoryModelMap { } } - matchingRegions.toSet + matchingRegions.toSet.map(returnRegion) + } + + def getRegionsWithSize(size: BigInt, function: String, negateCondition: Boolean = false): Set[MemoryRegion] = { + val matchingRegions = scala.collection.mutable.Set[MemoryRegion]() + + pushContext(function) + stackMap.foreach { + case (range, region) => + if (negateCondition) { + if (range.size != size) { + matchingRegions += region + } + } else if (range.size == size) { + matchingRegions += region + } + } + popContext() + + heapMap.foreach { case (range, region) => + if 
(negateCondition) { + if (range.size != size) { + matchingRegions += region + } + } else if (range.size == size) { + matchingRegions += region + } + } + + dataMap.foreach { case (range, region) => + if (negateCondition) { + if (range.size != size) { + matchingRegions += region + } + } else if (range.size == size) { + matchingRegions += region + } + } + + matchingRegions.toSet.map(returnRegion) + } + + def getAllocsPerProcedure: Map[String, Set[StackRegion]] = { + localStacks.map((name, stackRegions) => (name, stackRegions.toSet.map(returnRegion))).toMap } def getAllStackRegions: Set[StackRegion] = { - localStacks.values.toSet.flatten + localStacks.values.toSet.flatten.map(returnRegion) } - + def getAllDataRegions: Set[DataRegion] = { - dataMap.values.toSet + dataMap.values.toSet.map(returnRegion) } - + def getAllHeapRegions: Set[HeapRegion] = { - heapMap.values.toSet + heapMap.values.toSet.map(returnRegion) } - + def getAllRegions: Set[MemoryRegion] = { - (getAllStackRegions ++ getAllDataRegions ++ getAllHeapRegions) + getAllStackRegions ++ getAllDataRegions ++ getAllHeapRegions + } + + def getEnd(memoryRegion: MemoryRegion): BigInt = { // TODO: This would return a list of ends + val range = memoryRegion match { + case stackRegion: StackRegion => + stackMap.find((_, obj) => obj == stackRegion).map((range, _) => range).getOrElse(RangeKey(0, 0)) + case heapRegion: HeapRegion => + heapMap.find((_, obj) => obj == heapRegion).map((range, _) => range).getOrElse(RangeKey(0, 0)) + case dataRegion: DataRegion => + dataMap.find((_, obj) => obj == dataRegion).map((range, _) => range).getOrElse(RangeKey(0, 0)) + } + range.end } /* All regions that start at value and are exactly of length size */ def findStackFullAccessesOnly(value: BigInt, size: BigInt): Option[StackRegion] = { - stackMap.find((range, _) => range.start == value && range.size == size).map((range, obj) => obj) + stackMap.find((range, _) => range.start == value && range.size == size).map((range, obj) => returnRegion(obj)) } def findStackObject(value: BigInt): Option[StackRegion] = - stackMap.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => obj) + stackMap.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => returnRegion(obj)) def findSharedStackObject(value: BigInt): Set[StackRegion] = - sharedStackMap.values.flatMap(_.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => obj)).toSet + sharedStackMap.values.flatMap(_.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => returnRegion(obj))).toSet def findDataObject(value: BigInt): Option[DataRegion] = - dataMap.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => obj) + dataMap.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => returnRegion(obj)) override def toString: String = s"Stack: $stackMap\n Heap: $heapMap\n Data: $dataMap\n" @@ -323,6 +405,29 @@ class MemoryModelMap { logRegion(range, region) } } + + def mergeRegions(regions: Set[MemoryRegion]): MemoryRegion = { + // assert regions are of the same type + regions.foreach(uf.makeSet) + regions.foreach(uf.union(regions.head, _)) + uf.find(regions.head) + } + + private def returnRegion(region: MemoryRegion): MemoryRegion = { + uf.find(region) + } + + private def returnRegion(region: StackRegion): StackRegion = { + uf.find(region.asInstanceOf[MemoryRegion]).asInstanceOf[StackRegion] + } + + private def returnRegion(region: DataRegion): DataRegion = { + 
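    // The lookups above are routed through returnRegion, so callers always see the
    // union-find representative of a region. Illustrative sketch (hypothetical regions):
    //   mergeRegions(Set(regionA, regionB))   // unions the two and returns the representative
    //   findDataObject(addrInsideRegionA)     // now yields that same representative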
uf.find(region.asInstanceOf[MemoryRegion]).asInstanceOf[DataRegion] + } + + private def returnRegion(region: HeapRegion): HeapRegion = { + uf.find(region.asInstanceOf[MemoryRegion]).asInstanceOf[HeapRegion] + } } trait MemoryRegion { @@ -340,3 +445,50 @@ case class HeapRegion(override val regionIdentifier: String, size: BitVecLiteral case class DataRegion(override val regionIdentifier: String, start: BitVecLiteral) extends MemoryRegion { override def toString: String = s"Data($regionIdentifier, $start)" } + +class UnionFind { + // Map to store the parent of each region + private val parent: mutable.Map[MemoryRegion, MemoryRegion] = mutable.Map() + + // Map to store the size of each set, used for union by rank + private val size: mutable.Map[MemoryRegion, Int] = mutable.Map() + + // Initialise each region to be its own parent and set size to 1 + def makeSet(region: MemoryRegion): Unit = { + parent(region) = region + size(region) = 1 + } + + // Find operation with path compression + def find(region: MemoryRegion): MemoryRegion = { + if (!parent.contains(region)) { + makeSet(region) + } + + if (parent(region) != region) { + parent(region) = find(parent(region)) // Path compression + } + parent(region) + } + + // Union operation with union by rank + def union(region1: MemoryRegion, region2: MemoryRegion): Unit = { + val root1 = find(region1) + val root2 = find(region2) + + if (root1 != root2) { + if (size(root1) < size(root2)) { + parent(root1) = root2 + size(root2) += size(root1) + } else { + parent(root2) = root1 + size(root1) += size(root2) + } + } + } + + // Check if two regions are in the same set + def connected(region1: MemoryRegion, region2: MemoryRegion): Boolean = { + find(region1) == find(region2) + } +} \ No newline at end of file diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala index 63358aeab..fc8633f9b 100644 --- a/src/main/scala/analysis/RegionInjector.scala +++ b/src/main/scala/analysis/RegionInjector.scala @@ -4,11 +4,13 @@ import ir.* import util.Logger import scala.collection.immutable import scala.collection.mutable +import scala.collection.mutable.ArrayBuffer /** * Replaces the region access with the calculated memory region. 
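 * Concretely (an illustrative sketch; the region name is hypothetical): a load or store whose
 * address the MemoryModelMap resolves to exactly one region keeps its shape but has its Memory
 * renamed to that region's identifier, e.g.
 *   MemoryLoad(mem, R31 + 8, BigEndian, 64)
 *     ==> MemoryLoad(Memory("stack_8", mem.addressSize, mem.valueSize), R31 + 8, BigEndian, 64)
 * If several regions match, mergeRegions unions them and the representative's identifier is used.
 * initialMemory and readOnlyMemory sections are renamed to their data region the same way via
 * transformMemorySections.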
*/ class RegionInjector(domain: mutable.Set[CFGPosition], + program: Program, constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], mmm: MemoryModelMap, reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], @@ -17,6 +19,8 @@ class RegionInjector(domain: mutable.Set[CFGPosition], def nodeVisitor(): Unit = { for (elem <- domain) {localTransfer(elem)} + program.initialMemory = transformMemorySections(program.initialMemory) + program.readOnlyMemory = transformMemorySections(program.readOnlyMemory) } /** @@ -206,7 +210,7 @@ class RegionInjector(domain: mutable.Set[CFGPosition], } else if (regions.size > 1) { Logger.warn(s"MemStore is: ${cmd}") Logger.warn(s"Multiple regions found for memory store: ${regions}") - expr + MemoryStore(Memory(mmm.mergeRegions(regions).regionIdentifier, mem.addressSize, mem.valueSize), eval(index, cmd), eval(value, cmd), endian, size) } else { Logger.warn(s"MemStore is: ${cmd}") Logger.warn(s"No region found for memory store") @@ -220,7 +224,7 @@ class RegionInjector(domain: mutable.Set[CFGPosition], } else if (regions.size > 1) { Logger.warn(s"MemLoad is: ${cmd}") Logger.warn(s"Multiple regions found for memory load: ${regions}") - expr + MemoryLoad(Memory(mmm.mergeRegions(regions).regionIdentifier, mem.addressSize, mem.valueSize), eval(index, cmd), endian, size) } else { Logger.warn(s"MemLoad is: ${cmd}") Logger.warn(s"No region found for memory load") @@ -254,4 +258,23 @@ class RegionInjector(domain: mutable.Set[CFGPosition], case call: IndirectCall => // ignore IndirectCall case _ => // ignore other kinds of nodes } + + def transformMemorySections(memorySegment: ArrayBuffer[MemorySection]): ArrayBuffer[MemorySection] = { + val newArrayBuffer = ArrayBuffer.empty[MemorySection] + for (elem <- memorySegment) { + elem match { + case mem: MemorySection => + val regions = mmm.findDataObject(mem.address) + if (regions.size == 1) { + newArrayBuffer += MemorySection(regions.head.regionIdentifier, mem.address, mem.size, mem.bytes) + Logger.warn(s"Region ${regions.get.regionIdentifier} found for memory section ${mem.address}") + } else { + newArrayBuffer += mem + Logger.warn(s"No region found for memory section ${mem.address}") + } + case _ => + } + } + newArrayBuffer + } } \ No newline at end of file From 834f34e68889ddbc74950088f684cd682f368cbe Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Tue, 13 Aug 2024 16:02:23 +1000 Subject: [PATCH 022/104] Merging only region injection files --- src/main/scala/analysis/MemoryModelMap.scala | 227 +++++++++++++- src/main/scala/analysis/RegionInjector.scala | 301 +++++++++++++++++++ src/main/scala/util/RunUtils.scala | 4 + 3 files changed, 529 insertions(+), 3 deletions(-) create mode 100644 src/main/scala/analysis/RegionInjector.scala diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index d91340d5b..eb14b983c 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -30,6 +30,8 @@ class MemoryModelMap { private val heapMap: mutable.Map[RangeKey, HeapRegion] = mutable.TreeMap() private val dataMap: mutable.Map[RangeKey, DataRegion] = mutable.TreeMap() + private val uf = new UnionFind() + /** Add a range and object to the mapping * * @param offset the offset of the range @@ -66,6 +68,15 @@ class MemoryModelMap { currentDataMap.addOne(updatedRange -> currentMaxRegion) currentDataMap(RangeKey(offset, MAX_BIGINT)) = d } + case h: HeapRegion => + val 
currentHeapMap = heapMap + if (currentHeapMap.isEmpty) { + currentHeapMap(RangeKey(offset, offset + h.size.value - 1)) = h + } else { + val currentMaxRange = currentHeapMap.keys.maxBy(_.end) + val currentMaxRegion = currentHeapMap(currentMaxRange) + currentHeapMap(RangeKey(currentMaxRange.start + 1, h.size.value - 1)) = h + } } } @@ -164,6 +175,22 @@ class MemoryModelMap { for (dataRgn <- allDataRgns) { add(dataRgn.start.value, dataRgn) } + + // add heap regions + val rangeStart = 0 + for ((position, regions) <- memoryRegions) { + regions match { + case Lift(node) => + for (region <- node) { + region match { + case heapRegion: HeapRegion => + add(BigInt(0), heapRegion) + case _ => + } + } + case LiftedBottom => + } + } } // TODO: push and pop could be optimised by caching the results def pushContext(funName: String): Unit = { @@ -201,15 +228,139 @@ class MemoryModelMap { } } + /* All regions that either: + * 1. starts at value but size less than region size + * 2. starts at value but size more than region size (add both regions ie. next region) + * 3. starts between regions (start, end) and (value + size) => end + * 4. starts between regions (start, end) and (value + size) < end (add both regions ie. next region) + */ + def findStackPartialAccessesOnly(value: BigInt, size: BigInt): Set[StackRegion] = { + val matchingRegions = scala.collection.mutable.Set[StackRegion]() + + stackMap.foreach { case (range, region) => + // Condition 1: Starts at value but size less than region size + if (range.start == value && range.size > size) { + matchingRegions += region + } + // Condition 2: Starts at value but size more than region size (add subsequent regions) + else if (range.start == value && range.size < size) { + matchingRegions += region + var remainingSize = size - range.size + var nextStart = range.end + stackMap.toSeq.sortBy(_._1.start).dropWhile(_._1.start <= range.start).foreach { case (nextRange, nextRegion) => + if (remainingSize > 0) { + matchingRegions += nextRegion + remainingSize -= nextRange.size + nextStart = nextRange.end + } + } + } + // Condition 3: Starts between regions (start, end) and (value + size) => end + else if (range.start < value && (value + size) <= range.end) { + matchingRegions += region + } + // Condition 4: Starts between regions (start, end) and (value + size) < end (add subsequent regions) + else if (range.start < value && (value + size) > range.end) { + matchingRegions += region + var remainingSize = (value + size) - range.end + var nextStart = range.end + stackMap.toSeq.sortBy(_._1.start).dropWhile(_._1.start <= range.start).foreach { case (nextRange, nextRegion) => + if (remainingSize > 0) { + matchingRegions += nextRegion + remainingSize -= nextRange.size + nextStart = nextRange.end + } + } + } + } + + matchingRegions.toSet.map(returnRegion) + } + + def getRegionsWithSize(size: BigInt, function: String, negateCondition: Boolean = false): Set[MemoryRegion] = { + val matchingRegions = scala.collection.mutable.Set[MemoryRegion]() + + pushContext(function) + stackMap.foreach { + case (range, region) => + if (negateCondition) { + if (range.size != size) { + matchingRegions += region + } + } else if (range.size == size) { + matchingRegions += region + } + } + popContext() + + heapMap.foreach { case (range, region) => + if (negateCondition) { + if (range.size != size) { + matchingRegions += region + } + } else if (range.size == size) { + matchingRegions += region + } + } + + dataMap.foreach { case (range, region) => + if (negateCondition) { + if (range.size != size) 
{ + matchingRegions += region + } + } else if (range.size == size) { + matchingRegions += region + } + } + + matchingRegions.toSet.map(returnRegion) + } + + def getAllocsPerProcedure: Map[String, Set[StackRegion]] = { + localStacks.map((name, stackRegions) => (name, stackRegions.toSet.map(returnRegion))).toMap + } + + def getAllStackRegions: Set[StackRegion] = { + localStacks.values.toSet.flatten.map(returnRegion) + } + + def getAllDataRegions: Set[DataRegion] = { + dataMap.values.toSet.map(returnRegion) + } + + def getAllHeapRegions: Set[HeapRegion] = { + heapMap.values.toSet.map(returnRegion) + } + + def getAllRegions: Set[MemoryRegion] = { + getAllStackRegions ++ getAllDataRegions ++ getAllHeapRegions + } + + def getEnd(memoryRegion: MemoryRegion): BigInt = { // TODO: This would return a list of ends + val range = memoryRegion match { + case stackRegion: StackRegion => + stackMap.find((_, obj) => obj == stackRegion).map((range, _) => range).getOrElse(RangeKey(0, 0)) + case heapRegion: HeapRegion => + heapMap.find((_, obj) => obj == heapRegion).map((range, _) => range).getOrElse(RangeKey(0, 0)) + case dataRegion: DataRegion => + dataMap.find((_, obj) => obj == dataRegion).map((range, _) => range).getOrElse(RangeKey(0, 0)) + } + range.end + } + + /* All regions that start at value and are exactly of length size */ + def findStackFullAccessesOnly(value: BigInt, size: BigInt): Option[StackRegion] = { + stackMap.find((range, _) => range.start == value && range.size == size).map((range, obj) => returnRegion(obj)) + } def findStackObject(value: BigInt): Option[StackRegion] = - stackMap.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => obj) + stackMap.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => returnRegion(obj)) def findSharedStackObject(value: BigInt): Set[StackRegion] = - sharedStackMap.values.flatMap(_.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => obj)).toSet + sharedStackMap.values.flatMap(_.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => returnRegion(obj))).toSet def findDataObject(value: BigInt): Option[DataRegion] = - dataMap.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => obj) + dataMap.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => returnRegion(obj)) override def toString: String = s"Stack: $stackMap\n Heap: $heapMap\n Data: $dataMap\n" @@ -254,6 +405,29 @@ class MemoryModelMap { logRegion(range, region) } } + + def mergeRegions(regions: Set[MemoryRegion]): MemoryRegion = { + // assert regions are of the same type + regions.foreach(uf.makeSet) + regions.foreach(uf.union(regions.head, _)) + uf.find(regions.head) + } + + private def returnRegion(region: MemoryRegion): MemoryRegion = { + uf.find(region) + } + + private def returnRegion(region: StackRegion): StackRegion = { + uf.find(region.asInstanceOf[MemoryRegion]).asInstanceOf[StackRegion] + } + + private def returnRegion(region: DataRegion): DataRegion = { + uf.find(region.asInstanceOf[MemoryRegion]).asInstanceOf[DataRegion] + } + + private def returnRegion(region: HeapRegion): HeapRegion = { + uf.find(region.asInstanceOf[MemoryRegion]).asInstanceOf[HeapRegion] + } } trait MemoryRegion { @@ -271,3 +445,50 @@ case class HeapRegion(override val regionIdentifier: String, size: BitVecLiteral case class DataRegion(override val regionIdentifier: String, start: BitVecLiteral) extends MemoryRegion { override def toString: String = 
s"Data($regionIdentifier, $start)" } + +class UnionFind { + // Map to store the parent of each region + private val parent: mutable.Map[MemoryRegion, MemoryRegion] = mutable.Map() + + // Map to store the size of each set, used for union by rank + private val size: mutable.Map[MemoryRegion, Int] = mutable.Map() + + // Initialise each region to be its own parent and set size to 1 + def makeSet(region: MemoryRegion): Unit = { + parent(region) = region + size(region) = 1 + } + + // Find operation with path compression + def find(region: MemoryRegion): MemoryRegion = { + if (!parent.contains(region)) { + makeSet(region) + } + + if (parent(region) != region) { + parent(region) = find(parent(region)) // Path compression + } + parent(region) + } + + // Union operation with union by rank + def union(region1: MemoryRegion, region2: MemoryRegion): Unit = { + val root1 = find(region1) + val root2 = find(region2) + + if (root1 != root2) { + if (size(root1) < size(root2)) { + parent(root1) = root2 + size(root2) += size(root1) + } else { + parent(root2) = root1 + size(root1) += size(root2) + } + } + } + + // Check if two regions are in the same set + def connected(region1: MemoryRegion, region2: MemoryRegion): Boolean = { + find(region1) == find(region2) + } +} \ No newline at end of file diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala new file mode 100644 index 000000000..1d6ad0781 --- /dev/null +++ b/src/main/scala/analysis/RegionInjector.scala @@ -0,0 +1,301 @@ +package analysis + +import ir.* +import util.Logger +import scala.collection.mutable +import scala.collection.mutable.ArrayBuffer + +/** + * Replaces the region access with the calculated memory region. + */ +class RegionInjector(domain: mutable.Set[CFGPosition], + program: Program, + constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], + mmm: MemoryModelMap, + reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], + globalOffsets: Map[BigInt, BigInt]) { + private val stackPointer = Register("R31", 64) + + def nodeVisitor(): Unit = { + for (elem <- domain) {localTransfer(elem)} + program.initialMemory = transformMemorySections(program.initialMemory) + program.readOnlyMemory = transformMemorySections(program.readOnlyMemory) + } + + /** + * In expressions that have accesses within a region, we need to relocate + * the base address to the actual address using the relocation table. + * MUST RELOCATE because MMM iterate to find the lowest address + * TODO: May need to iterate over the relocation table to find the actual address + * + * @param address + * @param globalOffsets + * @return BitVecLiteral: the relocated address + */ + def relocatedBase(address: BitVecLiteral, globalOffsets: Map[BigInt, BigInt]): BitVecLiteral = { + val tableAddress = globalOffsets.getOrElse(address.value, address.value) + // this condition checks if the address is not layered and returns if it is not + if (tableAddress != address.value && !globalOffsets.contains(tableAddress)) { + return address + } + BitVecLiteral(tableAddress, address.size) + } + + /** + * Used to reduce an expression that may be a sub-region of a memory region. + * Pointer reduction example: + * R2 = R31 + 20 + * Mem[R2 + 8] <- R1 + * + * Steps: + * 1) R2 = R31 + 20 <- ie. stack access (assume R31 = stackPointer) + * ↓ + * R2 = StackRegion("stack_1", 20) + * + * 2) Mem[R2 + 8] <- R1 <- ie. 
memStore + * ↓ + * (StackRegion("stack_1", 20) + 8) <- R1 + * ↓ + * MMM.get(20 + 8) <- R1 + * + * @param binExpr + * @param n + * @return Set[MemoryRegion]: a set of regions that the expression may be pointing to + */ + def reducibleToRegion(binExpr: BinaryExpr, n: Command): Set[MemoryRegion] = { + var reducedRegions = Set.empty[MemoryRegion] + binExpr.arg1 match { + case variable: Variable => + evaluateExpressionWithSSA(binExpr, constantProp(n), n, reachingDefs).foreach { b => + val region = mmm.findDataObject(b.value) + reducedRegions = reducedRegions ++ region + } + if (reducedRegions.nonEmpty) { + return reducedRegions + } + val ctx = getUse(variable, n, reachingDefs) + for (i <- ctx) { + if (i != n) { // handles loops (ie. R19 = R19 + 1) %00000662 in jumptable2 + val regions = i.rhs match { + case loadL: MemoryLoad => + val foundRegions = exprToRegion(loadL.index, i) + val toReturn = mutable.Set[MemoryRegion]().addAll(foundRegions) + for { + f <- foundRegions + } { + // TODO: Must enable this (probably need to calculate those contents beforehand) +// if (memoryRegionContents.contains(f)) { +// memoryRegionContents(f).foreach { +// case b: BitVecLiteral => +// // val region = mmm.findDataObject(b.value) +// // if (region.isDefined) { +// // toReturn.addOne(region.get) +// // } +// case r: MemoryRegion => +// toReturn.addOne(r) +// toReturn.remove(f) +// } +// } + } + toReturn.toSet + case _: BitVecLiteral => + Set.empty[MemoryRegion] + case _ => + println(s"Unknown expression: ${i}") + println(ctx) + exprToRegion(i.rhs, i) + } + val results = evaluateExpressionWithSSA(binExpr.arg2, constantProp(n), n, reachingDefs) + for { + b <- results + r <- regions + } { + r match { + case stackRegion: StackRegion => + println(s"StackRegion: ${stackRegion.start}") + println(s"BitVecLiteral: ${b}") + if (b.size == stackRegion.start.size) { + val nextOffset = BinaryExpr(binExpr.op, stackRegion.start, b) + evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => + reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, b2), n) + } + } + case dataRegion: DataRegion => + val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start, globalOffsets), b) + evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => + reducedRegions ++= exprToRegion(b2, n) + } + case _ => + } + } + } + } + case _ => + } + reducedRegions + } + + /** + * Finds a region for a given expression using MMM results + * + * @param expr + * @param n + * @return Set[MemoryRegion]: a set of regions that the expression may be pointing to + */ + def exprToRegion(expr: Expr, n: Command): Set[MemoryRegion] = { + var res = Set[MemoryRegion]() + mmm.popContext() + mmm.pushContext(IRWalk.procedure(n).name) + expr match { // TODO: Stack detection here should be done in a better way or just merged with data + case binOp: BinaryExpr if binOp.arg1 == stackPointer => + evaluateExpressionWithSSA(binOp.arg2, constantProp(n), n, reachingDefs).foreach { b => + if binOp.arg2.variables.exists { v => v.sharedVariable } then { + Logger.debug("Shared stack object: " + b) + Logger.debug("Shared in: " + expr) + val regions = mmm.findSharedStackObject(b.value) + Logger.debug("found: " + regions) + res ++= regions + } else { + val region = mmm.findStackObject(b.value) + if (region.isDefined) { + res = res + region.get + } + } + } + res + case binaryExpr: BinaryExpr => + res ++= reducibleToRegion(binaryExpr, n) + res + case v: Variable if v == stackPointer => + res ++= 
mmm.findStackObject(0) + res + case v: Variable => + evaluateExpressionWithSSA(expr, constantProp(n), n, reachingDefs).foreach { b => + Logger.debug("BitVecLiteral: " + b) + val region = mmm.findDataObject(b.value) + if (region.isDefined) { + res += region.get + } + } + if (res.isEmpty) { // may be passed as param + val ctx = getUse(v, n, reachingDefs) + for (i <- ctx) { + i.rhs match { + case load: MemoryLoad => // treat as a region + res ++= exprToRegion(load.index, i) + case binaryExpr: BinaryExpr => + res ++= reducibleToRegion(binaryExpr, i) + case _ => // also treat as a region (for now) even if just Base + Offset without memLoad + res ++= exprToRegion(i.rhs, i) + } + } + } + res + case _ => + evaluateExpressionWithSSA(expr, constantProp(n), n, reachingDefs).foreach { b => + Logger.debug("BitVecLiteral: " + b) + val region = mmm.findDataObject(b.value) + if (region.isDefined) { + res += region.get + } + } + res + } + } + + /** Default implementation of eval. + */ + def eval(expr: Expr, cmd: Command): Expr = { + expr match + case literal: Literal => literal // ignore literals + case Extract(end, start, body) => + Extract(end, start, eval(body, cmd)) + case Repeat(repeats, body) => + Repeat(repeats, eval(body, cmd)) + case ZeroExtend(extension, body) => + ZeroExtend(extension, eval(body, cmd)) + case SignExtend(extension, body) => + SignExtend(extension, eval(body, cmd)) + case UnaryExpr(op, arg) => + UnaryExpr(op, eval(arg, cmd)) + case BinaryExpr(op, arg1, arg2) => + BinaryExpr(op, eval(arg1, cmd), eval(arg2, cmd)) + case MemoryLoad(mem, index, endian, size) => + // TODO: index should be replaced region + MemoryLoad(renameMemory(mem, index, cmd), eval(index, cmd), endian, size) + case variable: Variable => variable // ignore variables + } + + def renameMemory(mem: Memory, expr: Expr, cmd : Command): Memory = { + val regions = exprToRegion(eval(expr, cmd), cmd) + if (regions.size == 1) { + Logger.warn(s"Mem CMD is: ${cmd}") + Logger.warn(s"Region found for mem: ${regions.head}") + regions.head match { + case stackRegion: StackRegion => + StackMemory(stackRegion.regionIdentifier, mem.addressSize, mem.valueSize) + case dataRegion: DataRegion => + SharedMemory(dataRegion.regionIdentifier, mem.addressSize, mem.valueSize) + case _ => + } + } else if (regions.size > 1) { + Logger.warn(s"Mem CMD is: ${cmd}") + Logger.warn(s"Multiple regions found for mem: ${regions}") + mmm.mergeRegions(regions) match { + case stackRegion: StackRegion => + StackMemory(stackRegion.regionIdentifier, mem.addressSize, mem.valueSize) + case dataRegion: DataRegion => + SharedMemory(dataRegion.regionIdentifier, mem.addressSize, mem.valueSize) + case _ => + } + } else { + Logger.warn(s"Mem CMD is: ${cmd}") + Logger.warn(s"No region found for mem") + } + mem + } + + /** Transfer function for state lattice elements. 
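+   * Applied to every position in the domain: rewrites assignment right-hand sides, renames the target
+   * memory of memory assignments and rewrites their index and value, and rewrites assert/assume bodies.
+   * Jumps and other nodes are left unchanged.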
+ */ + def localTransfer(n: CFGPosition): Unit = n match { + case cmd: Command => + cmd match + case statement: Statement => statement match + case assign: Assign => + assign.rhs = eval(assign.rhs, cmd) + case mAssign: MemoryAssign => + mAssign.mem = renameMemory(mAssign.mem, mAssign.index, cmd) + mAssign.index = eval(mAssign.index, cmd) + mAssign.value = eval(mAssign.value, cmd) + case nop: NOP => // ignore NOP + case assert: Assert => + assert.body = eval(assert.body, cmd) + case assume: Assume => + assume.body = eval(assume.body, cmd) + case jump: Jump => jump match + case to: GoTo => // ignore GoTo + case call: Call => call match + case call: DirectCall => // ignore DirectCall + case call: IndirectCall => // ignore IndirectCall + case _ => // ignore other kinds of nodes + } + + def transformMemorySections(memorySegment: ArrayBuffer[MemorySection]): ArrayBuffer[MemorySection] = { + val newArrayBuffer = ArrayBuffer.empty[MemorySection] + for (elem <- memorySegment) { + elem match { + case mem: MemorySection => + val regions = mmm.findDataObject(mem.address) + if (regions.size == 1) { + newArrayBuffer += MemorySection(regions.head.regionIdentifier, mem.address, mem.size, mem.bytes) + Logger.warn(s"Region ${regions.get.regionIdentifier} found for memory section ${mem.address}") + } else { + newArrayBuffer += mem + Logger.warn(s"No region found for memory section ${mem.address}") + } + case _ => + } + } + newArrayBuffer + } +} \ No newline at end of file diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index da353ac21..12138a67e 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -677,6 +677,10 @@ object StaticAnalysis { mmm.convertMemoryRegions(mraResult, mergedSubroutines, globalOffsets, mraSolver.procedureToSharedRegions) mmm.logRegions() + Logger.info("[!] Injecting regions") + val regionInjector = RegionInjector(domain, IRProgram, constPropResultWithSSA, mmm, reachingDefinitionsAnalysisResults, globalOffsets) + regionInjector.nodeVisitor() + Logger.info("[!] 
Running Steensgaard") val steensgaardSolver = InterprocSteensgaardAnalysis(IRProgram, constPropResultWithSSA, regionAccessesAnalysisResults, mmm, reachingDefinitionsAnalysisResults, globalOffsets) steensgaardSolver.analyze() From f607fdf336a04c0bec8d7c6c46460c3223ecc166 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Tue, 13 Aug 2024 16:10:07 +1000 Subject: [PATCH 023/104] Fixed Small Bugs --- src/main/scala/analysis/RegionInjector.scala | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala index 1d6ad0781..2b227b415 100644 --- a/src/main/scala/analysis/RegionInjector.scala +++ b/src/main/scala/analysis/RegionInjector.scala @@ -210,6 +210,9 @@ class RegionInjector(domain: mutable.Set[CFGPosition], case literal: Literal => literal // ignore literals case Extract(end, start, body) => Extract(end, start, eval(body, cmd)) + case UninterpretedFunction(name, params, returnType) => + val newParams = params.map { p => eval(p, cmd) } + UninterpretedFunction(name, newParams, returnType) case Repeat(repeats, body) => Repeat(repeats, eval(body, cmd)) case ZeroExtend(extension, body) => @@ -233,9 +236,9 @@ class RegionInjector(domain: mutable.Set[CFGPosition], Logger.warn(s"Region found for mem: ${regions.head}") regions.head match { case stackRegion: StackRegion => - StackMemory(stackRegion.regionIdentifier, mem.addressSize, mem.valueSize) + return StackMemory(stackRegion.regionIdentifier, mem.addressSize, mem.valueSize) case dataRegion: DataRegion => - SharedMemory(dataRegion.regionIdentifier, mem.addressSize, mem.valueSize) + return SharedMemory(dataRegion.regionIdentifier, mem.addressSize, mem.valueSize) case _ => } } else if (regions.size > 1) { @@ -243,9 +246,9 @@ class RegionInjector(domain: mutable.Set[CFGPosition], Logger.warn(s"Multiple regions found for mem: ${regions}") mmm.mergeRegions(regions) match { case stackRegion: StackRegion => - StackMemory(stackRegion.regionIdentifier, mem.addressSize, mem.valueSize) + return StackMemory(stackRegion.regionIdentifier, mem.addressSize, mem.valueSize) case dataRegion: DataRegion => - SharedMemory(dataRegion.regionIdentifier, mem.addressSize, mem.valueSize) + return SharedMemory(dataRegion.regionIdentifier, mem.addressSize, mem.valueSize) case _ => } } else { @@ -293,7 +296,7 @@ class RegionInjector(domain: mutable.Set[CFGPosition], newArrayBuffer += mem Logger.warn(s"No region found for memory section ${mem.address}") } - case _ => + case null => } } newArrayBuffer From 5a20e37c7862e324cd823fcae4078c53ef540e36 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Tue, 13 Aug 2024 16:16:24 +1000 Subject: [PATCH 024/104] Dont ignore regions --- src/main/scala/analysis/MemoryRegionAnalysis.scala | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index 65aa1cc20..6817589b4 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -202,12 +202,8 @@ trait MemoryRegionAnalysis(val program: Program, s } case memAssign: MemoryAssign => - if (ignoreRegions.contains(memAssign.value)) { - s - } else { - val result = eval(memAssign.index, s, cmd) - regionLattice.lub(s, result) - } + val result = eval(memAssign.index, s, cmd) + regionLattice.lub(s, result) case assign: Assign => stackDetection(assign) var m = s From f798374a3103ab92875c70d1a9c08f611977549f Mon 
Sep 17 00:00:00 2001 From: yousifpatti Date: Mon, 19 Aug 2024 11:27:55 +1000 Subject: [PATCH 025/104] Changes to consider local variables and initial offsets of a procedure --- .../scala/analysis/MemoryRegionAnalysis.scala | 29 +++++++++++++++---- src/main/scala/analysis/RegionInjector.scala | 27 +++++++++++++---- src/main/scala/util/RunUtils.scala | 5 +++- 3 files changed, 50 insertions(+), 11 deletions(-) diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index 6817589b4..8decde0e4 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -1,5 +1,6 @@ package analysis +import analysis.BitVectorEval.isNegative import analysis.solvers.WorklistFixpointSolverWithReachability import ir.* import util.Logger @@ -94,7 +95,7 @@ trait MemoryRegionAnalysis(val program: Program, def reducibleToRegion(binExpr: BinaryExpr, n: Command): Set[MemoryRegion] = { var reducedRegions = Set.empty[MemoryRegion] binExpr.arg1 match { - case variable: Variable => + case variable: Variable if !spList.contains(variable) => val ctx = getUse(variable, n, reachingDefs) for (i <- ctx) { val regions = i.rhs match { @@ -121,10 +122,25 @@ trait MemoryRegionAnalysis(val program: Program, } } case _ => + eval(binExpr, Set.empty, n) } reducedRegions } + def reducibleVariable(variable: Variable, n: Command): Set[MemoryRegion] = { + var regions = Set.empty[MemoryRegion] + val ctx = getDefinition(variable, n, reachingDefs) + for (i <- ctx) { + i.rhs match { + case binaryExpr: BinaryExpr => + regions = regions ++ reducibleToRegion(binaryExpr, i) + case _ => + //regions = regions ++ eval(i.rhs, Set.empty, i) + } + } + regions + } + def eval(exp: Expr, env: Set[MemoryRegion], n: Command): Set[MemoryRegion] = { Logger.debug(s"evaluating $exp") Logger.debug(s"env: $env") @@ -133,7 +149,12 @@ trait MemoryRegionAnalysis(val program: Program, case binOp: BinaryExpr => if (spList.contains(binOp.arg1)) { evaluateExpression(binOp.arg2, constantProp(n)) match { - case Some(b: BitVecLiteral) => Set(poolMaster(b, IRWalk.procedure(n))) + case Some(b: BitVecLiteral) => + if (isNegative(b)) { + Set(poolMaster(BitVecLiteral(0, 64), IRWalk.procedure(n))) + } else { + Set(poolMaster(b, IRWalk.procedure(n))) + } case None => env } } else if (reducibleToRegion(binOp, n).nonEmpty) { @@ -146,8 +167,6 @@ trait MemoryRegionAnalysis(val program: Program, } case variable: Variable => variable match { - case _: LocalVar => - env case reg: Register if spList.contains(reg) => eval(BitVecLiteral(0, 64), env, n) case _ => @@ -155,7 +174,7 @@ trait MemoryRegionAnalysis(val program: Program, case Some(b: BitVecLiteral) => eval(b, env, n) case _ => - env // we cannot evaluate this to a concrete value, we need VSA for this + reducibleVariable(variable, n) } } case memoryLoad: MemoryLoad => diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala index 2b227b415..942595793 100644 --- a/src/main/scala/analysis/RegionInjector.scala +++ b/src/main/scala/analysis/RegionInjector.scala @@ -1,7 +1,9 @@ package analysis +import analysis.BitVectorEval.isNegative import ir.* import util.Logger + import scala.collection.mutable import scala.collection.mutable.ArrayBuffer @@ -156,19 +158,22 @@ class RegionInjector(domain: mutable.Set[CFGPosition], Logger.debug("found: " + regions) res ++= regions } else { + if (isNegative(b)) { + val region = mmm.findStackObject(0) + if (region.isDefined) { + res = 
res + region.get + } + } val region = mmm.findStackObject(b.value) if (region.isDefined) { res = res + region.get } } } - res case binaryExpr: BinaryExpr => res ++= reducibleToRegion(binaryExpr, n) - res case v: Variable if v == stackPointer => res ++= mmm.findStackObject(0) - res case v: Variable => evaluateExpressionWithSSA(expr, constantProp(n), n, reachingDefs).foreach { b => Logger.debug("BitVecLiteral: " + b) @@ -177,6 +182,17 @@ class RegionInjector(domain: mutable.Set[CFGPosition], res += region.get } } + if (res.isEmpty) { + val ctx = getDefinition(v, n, reachingDefs) + for (i <- ctx) { + i.rhs match { + case be: BinaryExpr => + res = res ++ exprToRegion(eval(i.rhs, i), n) + case _ => + } + } + } + if (res.isEmpty) { // may be passed as param val ctx = getUse(v, n, reachingDefs) for (i <- ctx) { @@ -190,7 +206,8 @@ class RegionInjector(domain: mutable.Set[CFGPosition], } } } - res + case load: MemoryLoad => // treat as a region + res ++= exprToRegion(load.index, n) case _ => evaluateExpressionWithSSA(expr, constantProp(n), n, reachingDefs).foreach { b => Logger.debug("BitVecLiteral: " + b) @@ -199,8 +216,8 @@ class RegionInjector(domain: mutable.Set[CFGPosition], res += region.get } } - res } + res } /** Default implementation of eval. diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 12138a67e..821cb80c8 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -1,11 +1,12 @@ package util -import java.io.{File, PrintWriter, FileInputStream, BufferedWriter, FileWriter, IOException} +import java.io.{BufferedWriter, File, FileInputStream, FileWriter, IOException, PrintWriter} import com.grammatech.gtirb.proto.IR.IR import com.grammatech.gtirb.proto.Module.Module import com.grammatech.gtirb.proto.Section.Section import spray.json.* import gtirb.* + import scala.collection.mutable.ListBuffer import scala.collection.mutable.ArrayBuffer import java.io.{File, PrintWriter} @@ -25,6 +26,7 @@ import org.antlr.v4.runtime.BailErrorStrategy import org.antlr.v4.runtime.{CharStreams, CommonTokenStream, Token} import translating.* import util.Logger + import java.util.Base64 import spray.json.DefaultJsonProtocol.* import util.intrusive_list.IntrusiveList @@ -32,6 +34,7 @@ import analysis.CfgCommandNode import scala.annotation.tailrec import scala.collection.mutable +import scala.sys.exit /** This file contains the main program execution. See RunUtils.loadAndTranslate for the high-level process. 
*/ From 7583c31a2cb4692650479b8bc796a4b5669cca82 Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 19 Aug 2024 16:59:48 +1000 Subject: [PATCH 026/104] resolved bugs --- src/main/scala/analysis/DSAUtility.scala | 104 ++++--- src/main/scala/analysis/Local.scala | 13 +- src/test/scala/LocalTest.scala | 364 +++++++++++------------ 3 files changed, 249 insertions(+), 232 deletions(-) diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index 8dbd4e124..bf67cf7a2 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -267,6 +267,7 @@ class DSG(val proc: Procedure, else Set(formals(arg)) + /** * collects all the nodes that are currently in the DSG and updates nodes member variable */ @@ -282,48 +283,65 @@ class DSG(val proc: Procedure, /** * Collapses the node causing it to lose field sensitivity */ - def collapseNode(n: DSN): Unit = + def collapseNode(n: DSN): DSN = + + val (term, offset) = solver.find(n.term) - val node = term.asInstanceOf[Derm].node - val collapedCell = DSC(Some(node), 0) - - - var pointeeInternalOffset: BigInt = 0 - val cell = node.cells.tail.foldLeft(adjust(node.cells.head._2.getPointee)) { - (c, field) => - val cell = field._2 - if cell._pointee.isDefined && cell.getPointee.cell == cell then - cell._pointee = Some(Slice(collapedCell, 0)) -// collapedCell._pointee = Some(Slice(collapedCell, 0)) - c - else if cell._pointee.isDefined then - val slice = cell.getPointee - if slice.internalOffset > pointeeInternalOffset then - pointeeInternalOffset = slice.internalOffset - mergeCells(c, adjust(slice)) - else - c - } + val node: DSN = term.asInstanceOf[Derm].node + + if !(n.collapsed || find(n).node.collapsed) then + + val collapsedNode: DSN = DSN(n.graph) + val collapedCell = DSC(Some(collapsedNode), 0) + + n.flags.collapsed = true + collapsedNode.flags.collapsed = true + + var pointeeInternalOffset: BigInt = 0 + var pointToItself = false + var cell = node.cells.tail.foldLeft(adjust(node.cells.head._2.getPointee)) { + (c, field) => + val cell = field._2 + val pointee = cell._pointee + if pointee.isDefined && adjust(cell.getPointee) == cell then + // cell._pointee = Some(Slice(collapedCell, 0)) + pointToItself = true + // collapedCell._pointee = Some(Slice(collapedCell, 0)) + c + else if pointee.isDefined then + val slice = cell.getPointee + if slice.internalOffset > pointeeInternalOffset then + pointeeInternalOffset = slice.internalOffset + mergeCells(c, adjust(slice)) + else + c + } + + if pointToItself then + cell = mergeCells(cell, collapedCell) + + + collapedCell._pointee = Some(Slice(collapedCell, 0)) + + assert(collapsedNode.cells.size == 1) -// node.cells.values.foreach( -// cell => -//// replace(cell, collapedCell, 0) TODO check that this works by just ignoring the replace -// -// pointTo.foreach { -// case (pointer, pointee) => -// if pointer.equals(cell) then -// pointTo.remove(pointer) -// pointTo.update(collapedCell, pointee) -// } -// ) - node.flags.collapsed = true + collapsedNode.children.addAll(node.children) + collapsedNode.children += (node -> 0) + collapsedNode.allocationRegions.addAll(node.allocationRegions) // add regions and flags of node 1 to node 2 + collapsedNode.flags.join(node.flags) - node.cells.clear() - node.cells.addOne(0, collapedCell) - if cell.node.isDefined then - node.cells(0)._pointee = Some(Slice(cell, pointeeInternalOffset)) + solver.unify(n.term, collapsedNode.term, 0) + + collapsedNode + else + assert(find(n).node.collapsed) + find(n).node 
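+    // The commented-out block below mutates the node's cells in place; the path above instead creates a
+    // fresh collapsed node and unifies it with n through the solver.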
+// node.cells.clear() +// node.cells.addOne(0, collapedCell) +// if cell.node.isDefined then +// node.cells(0)._pointee = Some(Slice(cell, pointeeInternalOffset)) /** * this function merges all the overlapping cells in the given node @@ -418,21 +436,21 @@ class DSG(val proc: Procedure, if cell1.equals(cell2) then // same cell no action required cell1 else if cell1.node.isDefined && cell1.node.equals(cell2.node) then // same node different cells causes collapse - collapseNode(cell1.node.get) - cell1.node.get.cells(0) + val ne = collapseNode(cell1.node.get) + ne.cells(0) else if cell1.node.isEmpty then ??? // not sure how to handle this yet TODO possibly take it out of the merge? // replace(cell1, cell2, 0) cell2 else if cell1.node.get.collapsed || cell2.node.get.collapsed then // a collapsed node - val node1 = cell1.node.get - val node2 = cell2.node.get + var node1 = cell1.node.get + var node2 = cell2.node.get assert(node1.collapsed || node2.collapsed) - collapseNode(node1) // collapse the other node - collapseNode(node2) + node1 = collapseNode(node1) // collapse the other node + node2 = collapseNode(node2) node2.children.addAll(node1.children) node2.children += (node1 -> 0) node2.allocationRegions.addAll(node1.allocationRegions) // add regions and flags of node 1 to node 2 diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala index 7dc1e2974..4cb3e26ee 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/Local.scala @@ -133,7 +133,10 @@ class Local( // visit all the defining pointer operation on rhs variable first reachingDefs(position)(rhs).foreach(visit) // get the cells of all the SSA variables in the set - val cells: Set[Slice] = graph.getCells(position, rhs) + val cells: Set[Slice] = graph.getCells(position, rhs).foldLeft(Set[Slice]()) { + (col, slice) => + col + Slice(graph.find(slice.cell), slice.internalOffset) + } // merge the cells or their pointees with lhs var result = cells.foldLeft(lhs) { (c, t) => @@ -152,8 +155,8 @@ class Local( node.getCell(field) ) else - val node = cell.node.get - graph.collapseNode(node) + var node = cell.node.get + node = graph.collapseNode(node) graph.mergeCells(c, if pointee then graph.adjust(node.cells(0).getPointee) else node.cells(0)) } @@ -199,7 +202,7 @@ class Local( } val node = cell.node.get node.flags.unknown = true - graph.collapseNode(node) + val test = graph.collapseNode(node) } def visit(n: CFGPosition): Unit = { @@ -339,7 +342,7 @@ class Local( graph.nodes.foreach(node => node.children.foreach( child => - assert(graph.solver.find(child._1.term)._1.equals(node.term)) + assert(graph.solver.find(child._1.term).equals(graph.solver.find(node.term))) assert(graph.solver.find(child._1.term)._2.equals(child._2)) ) diff --git a/src/test/scala/LocalTest.scala b/src/test/scala/LocalTest.scala index a9f9bcc21..2d7e67d00 100644 --- a/src/test/scala/LocalTest.scala +++ b/src/test/scala/LocalTest.scala @@ -28,165 +28,161 @@ class LocalTest extends AnyFunSuite, TestUtil { val dsg = results.analysis.get.locals.get(program.mainProcedure) // assert(dsg.pointTo.size == 12) // 12 - val framePointer = dsg.find(dsg.adjust(dsg.find(dsg.stackMapping(0).cells(0)).getPointee)) - val R29formal = dsg.find(dsg.adjust(dsg.formals(R29))) + val framePointer = dsg.adjust(dsg.find(dsg.stackMapping(0).cells(0)).getPointee) + val R29formal = dsg.adjust(dsg.formals(R29)) assert(framePointer.equals(R29formal)) val stack8 = dsg.find(dsg.stackMapping(8).cells(0)) // R31 + 8 - 
assert(dsg.find(dsg.adjust(stack8.getPointee)).equals(dsg.find(dsg.adjust(dsg.formals(R30))))) + assert(dsg.adjust(stack8.getPointee).equals(dsg.adjust(dsg.formals(R30)))) val stack40 = dsg.find(dsg.stackMapping(40).cells(0))// R31 + 40 val stack32 = dsg.find(dsg.stackMapping(32).cells(0)) // R31 + 32 val stack24 = dsg.find(dsg.stackMapping(24).cells(0)) // R31 + 24 and Malloc - assert(dsg.find(dsg.adjust(stack32.getPointee)).equals(stack24)) + assert(dsg.adjust(stack32.getPointee).equals(stack24)) assert(stack24.node.get.collapsed) - assert(dsg.find(dsg.adjust(stack24.getPointee)).equals(stack24)) - -// assert((stack40.getPointee).equals(((dsg.globalMapping(AddressRange(69600, 69600))._1.cells(0).getPointee)._1.getPointee))) + assert(dsg.adjust(stack24.getPointee).equals(stack24)) + assert(dsg.find(dsg.adjust(stack40.getPointee)).equals(dsg.find(dsg.adjust(dsg.find(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69600))._1.cells(0).getPointee)).getPointee)))) } -} -// -// test("local jumptable2 sub_seven") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/jumptable2/jumptable2.adt", -// relfFile = "examples/jumptable2/jumptable2.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.locals.get(program.procs("sub_seven")) + + test("local jumptable2 sub_seven") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.locals.get(program.procs("sub_seven")) // assert(dsg.pointTo.size == 9) -// assert(dsg.stackMapping.isEmpty) -// println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) -// -// // initial global mappings -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 
2020 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) -// -// } -// -// test("local jumptable2 add_six") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/jumptable2/jumptable2.adt", -// relfFile = "examples/jumptable2/jumptable2.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.locals.get(program.procs("add_six")) + assert(dsg.stackMapping.isEmpty) + assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.get.collapsed) + + // initial global mappings + assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) + + } + + test("local jumptable2 add_six") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.locals.get(program.procs("add_six")) // assert(dsg.pointTo.size == 9) -// assert(dsg.stackMapping.isEmpty) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) -// -// // initial global mappings -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) -// 
assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) -// -// } -// -// test("local jumptable2 add_two") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/jumptable2/jumptable2.adt", -// relfFile = "examples/jumptable2/jumptable2.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.locals.get(program.procs("add_two")) + assert(dsg.stackMapping.isEmpty) + assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.get.collapsed) + + // initial global mappings + assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) + + } + + test("local jumptable2 add_two") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = 
"examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.locals.get(program.procs("add_two")) // assert(dsg.pointTo.size == 9) -// assert(dsg.stackMapping.isEmpty) -// println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) -// -// // initial global mappings -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) -// -// } -// -// test("local jumptable2 main") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/jumptable2/jumptable2.adt", -// relfFile = "examples/jumptable2/jumptable2.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// -// -// val program = results.ir.program -// val dsg = results.analysis.get.locals.get(program.mainProcedure) + assert(dsg.stackMapping.isEmpty) + assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.get.collapsed) + + // initial global mappings + assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 
69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) + + } + + test("local jumptable2 main") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + + + val program = results.ir.program + val dsg = results.analysis.get.locals.get(program.mainProcedure) // assert(dsg.pointTo.size == 12) // 12 -// val framePointer = dsg.stackMapping(0).cells(0) -// val stack8 = dsg.stackMapping(8).cells(0) -// val stack16 = dsg.stackMapping(16).cells(0) -// val stack28 = dsg.stackMapping(28).cells(0) -// assert(dsg.pointTo(framePointer).equals(dsg.formals(R29))) -// assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) -// assert(dsg.pointTo(stack16).equals(dsg.formals(R1))) -// assert(dsg.pointTo(stack28).equals(dsg.formals(R0))) -// -// // initial global mappings -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) -// -// -// } + val framePointer = dsg.find(dsg.stackMapping(0).cells(0)) + val stack8 = dsg.find(dsg.stackMapping(8).cells(0)) + val stack16 = dsg.find(dsg.stackMapping(16).cells(0)) + val stack28 = dsg.find(dsg.stackMapping(28).cells(0)) + 
assert(dsg.adjust(framePointer.getPointee).equals(dsg.adjust(dsg.formals(R29)))) + assert(dsg.adjust(stack8.getPointee).equals(dsg.adjust(dsg.formals(R30)))) + assert(dsg.adjust(stack16.getPointee).equals(dsg.adjust(dsg.formals(R1)))) + assert(dsg.adjust(stack28.getPointee).equals(dsg.adjust(dsg.formals(R0)))) + + // initial global mappings + assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) + + + } // // // @@ -219,43 +215,43 @@ class LocalTest extends AnyFunSuite, TestUtil { // } // // -// test("unsafe pointer arithmetic") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.adt", -// relfFile = "examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.locals.get(program.mainProcedure) -// val stack0 = dsg.stackMapping(0).cells(0) -// val stack8 = dsg.stackMapping(8).cells(0) -// val stack24 = dsg.stackMapping(24).cells(0) -// val stack32 = dsg.stackMapping(32).cells(0) -// val stack40 = dsg.stackMapping(40).cells(0) -// val stack48 = dsg.stackMapping(48).cells(0) -// val stack56 = dsg.stackMapping(56).cells(0) + test("unsafe pointer arithmetic") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.adt", + relfFile = "examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.locals.get(program.mainProcedure) + val stack0 = 
dsg.adjust(dsg.find(dsg.stackMapping(0).cells(0)).getPointee) + val stack8 = dsg.adjust(dsg.find(dsg.stackMapping(8).cells(0)).getPointee) + val stack24 = dsg.adjust(dsg.find(dsg.stackMapping(24).cells(0)).getPointee) + val stack32 = dsg.adjust(dsg.find(dsg.stackMapping(32).cells(0)).getPointee) + val stack40 = dsg.adjust(dsg.find(dsg.stackMapping(40).cells(0)).getPointee) + val stack48 = dsg.adjust(dsg.find(dsg.stackMapping(48).cells(0)).getPointee) + val stack56 = dsg.adjust(dsg.find(dsg.stackMapping(56).cells(0)).getPointee) // assert(dsg.pointTo.size==10) -// assert(dsg.pointTo(stack0).equals(dsg.formals(R29))) -// assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) -// assert(dsg.pointTo(stack24).equals(dsg.pointTo(stack32))) -// assert(dsg.pointTo(stack24)._2 == 0) -// assert(dsg.pointTo(stack24)._1.node.get.allocationRegions.size == 1) -// assert(dsg.pointTo(stack24)._1.node.get.allocationRegions.head.asInstanceOf[HeapLocation].size == 20) -// assert(dsg.pointTo(stack40)._1.node.get.allocationRegions.size == 1) -// assert(dsg.pointTo(stack48)._1.node.get.allocationRegions.head.asInstanceOf[HeapLocation].size == 8) -// assert(dsg.pointTo(dsg.pointTo(stack48)._1.node.get.cells(0)).equals(dsg.pointTo(stack40))) -// assert(dsg.pointTo(dsg.pointTo(stack48)._1.node.get.cells(0)).equals(dsg.pointTo(stack56))) -// assert(dsg.pointTo(stack24)._1.equals(dsg.pointTo(stack40)._1)) -// assert(dsg.pointTo(stack40)._2 == 1) -// } + assert(stack0.equals(dsg.adjust(dsg.formals(R29)))) + assert(stack8.equals(dsg.adjust(dsg.formals(R30)))) + assert(stack24.equals(stack32)) + assert(stack24.offset == 0) + assert(stack24.node.get.allocationRegions.size == 1) + assert(stack24.node.get.allocationRegions.head.asInstanceOf[HeapLocation].size == 20) + assert(stack40.node.get.allocationRegions.size == 1) + assert(stack48.node.get.allocationRegions.head.asInstanceOf[HeapLocation].size == 8) + assert(dsg.adjust(stack48.getPointee).equals(stack40)) + assert(dsg.adjust(stack48.getPointee).equals(stack56)) + assert(stack24.equals(stack40)) + assert(stack40.offset == 1) // todo check + } // // test("interproc pointer arithmetic main") { // val results = RunUtils.loadAndTranslate( @@ -856,4 +852,4 @@ class LocalTest extends AnyFunSuite, TestUtil { // assert(dsg.pointTo(stack24)._2 == 0) // } // -//} +} From 65e6ff706ab45d15285e918e1900bf07274afc25 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Tue, 20 Aug 2024 10:22:49 +1000 Subject: [PATCH 027/104] Adding condition to disable stackIdentification --- src/main/scala/util/RunUtils.scala | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 821cb80c8..07ae59fd3 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -466,18 +466,20 @@ object IRTransform { /** Cull unneccessary information that does not need to be included in the translation, and infer stack regions, and * add in modifies from the spec. */ - def prepareForTranslation(config: ILLoadingConfig, ctx: IRContext): Unit = { + def prepareForTranslation(config: BASILConfig, ctx: IRContext): Unit = { ctx.program.determineRelevantMemory(ctx.globalOffsets) Logger.info("[!] Stripping unreachable") val before = ctx.program.procedures.size - ctx.program.stripUnreachableFunctions(config.procedureTrimDepth) + ctx.program.stripUnreachableFunctions(config.loading.procedureTrimDepth) Logger.info( s"[!] 
Removed ${before - ctx.program.procedures.size} functions (${ctx.program.procedures.size} remaining)" ) - val stackIdentification = StackSubstituter() - stackIdentification.visitProgram(ctx.program) + if (config.staticAnalysis.isEmpty) { + val stackIdentification = StackSubstituter() + stackIdentification.visitProgram(ctx.program) + } val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap ctx.program.setModifies(specModifies) @@ -901,7 +903,7 @@ object RunUtils { interpreter.interpret(ctx.program) } - IRTransform.prepareForTranslation(q.loading, ctx) + IRTransform.prepareForTranslation(q, ctx) Logger.info("[!] Translating to Boogie") From 20f9cf65cf8c309e510906911f8b7e5601fba1fc Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Tue, 20 Aug 2024 11:36:57 +1000 Subject: [PATCH 028/104] Fixed not loaded data regions --- src/main/scala/analysis/MemoryModelMap.scala | 4 ++-- src/main/scala/util/RunUtils.scala | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index eb14b983c..2fde6265d 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -149,9 +149,9 @@ class MemoryModelMap { procedureToRegions } - def convertMemoryRegions(memoryRegions: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]], externalFunctions: Map[BigInt, String], globalOffsets: Map[BigInt, BigInt], procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]]): Unit = { + def convertMemoryRegions(memoryRegions: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]], externalFunctions: Map[BigInt, String], globalOffsets: Map[BigInt, BigInt], globalAddresses: Map[BigInt, String], procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]]): Unit = { // map externalFunctions name, value to DataRegion(name, value) and then sort by value - val externalFunctionRgns = externalFunctions.map((offset, name) => resolveInverseGlobalOffset(name, BitVecLiteral(offset, 64), globalOffsets)) + val externalFunctionRgns = (externalFunctions ++ globalAddresses).map((offset, name) => resolveInverseGlobalOffset(name, BitVecLiteral(offset, 64), globalOffsets)) // we should collect all data regions otherwise the ordering might be wrong var dataRgns: Set[DataRegion] = Set.empty diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 07ae59fd3..9136a4432 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -679,7 +679,7 @@ object StaticAnalysis { Logger.info("[!] Running MMM") val mmm = MemoryModelMap() - mmm.convertMemoryRegions(mraResult, mergedSubroutines, globalOffsets, mraSolver.procedureToSharedRegions) + mmm.convertMemoryRegions(mraResult, mergedSubroutines, globalOffsets, globalAddresses, mraSolver.procedureToSharedRegions) mmm.logRegions() Logger.info("[!] 
Injecting regions") From ac906b90f89ff60bf40c86f4d3150887a6899b87 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Wed, 21 Aug 2024 17:51:28 +1000 Subject: [PATCH 029/104] Fixed duplicate addresses causing tests to fail --- src/main/scala/analysis/MemoryModelMap.scala | 13 ++++++++----- src/main/scala/analysis/RegionInjector.scala | 20 ++++++++------------ src/main/scala/util/RunUtils.scala | 1 - 3 files changed, 16 insertions(+), 18 deletions(-) diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index 2fde6265d..19a372405 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -88,11 +88,11 @@ class MemoryModelMap { * @param name * @param address * @param globalOffsets - * @return DataRegion: a DataRegion representing the actual address + * @return BitVector: a BitVector representing the actual address */ - private def resolveInverseGlobalOffset(name: String, address: BitVecLiteral, globalOffsets: Map[BigInt, BigInt]): DataRegion = { + private def resolveInverseGlobalOffset(name: String, address: BigInt, globalOffsets: Map[BigInt, BigInt]): BigInt = { val inverseGlobalOffsets = globalOffsets.map(_.swap) - var tableAddress = inverseGlobalOffsets.getOrElse(address.value, address.value) + var tableAddress = inverseGlobalOffsets.getOrElse(address, address) // addresses may be layered as in jumptable2 example for which recursive search is required var exitLoop = false while (inverseGlobalOffsets.contains(tableAddress) && !exitLoop) { @@ -104,7 +104,7 @@ class MemoryModelMap { } } - DataRegion(name, BitVecLiteral(tableAddress, 64)) + tableAddress } /** @@ -151,7 +151,10 @@ class MemoryModelMap { def convertMemoryRegions(memoryRegions: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]], externalFunctions: Map[BigInt, String], globalOffsets: Map[BigInt, BigInt], globalAddresses: Map[BigInt, String], procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]]): Unit = { // map externalFunctions name, value to DataRegion(name, value) and then sort by value - val externalFunctionRgns = (externalFunctions ++ globalAddresses).map((offset, name) => resolveInverseGlobalOffset(name, BitVecLiteral(offset, 64), globalOffsets)) + val reversedExternalFunctionRgns = (externalFunctions ++ globalAddresses).map((offset, name) => resolveInverseGlobalOffset(name, offset, globalOffsets) -> name) + val filteredGlobalOffsets = globalAddresses.filterNot((offset, name) => reversedExternalFunctionRgns.contains(offset)) + + val externalFunctionRgns = (reversedExternalFunctionRgns ++ filteredGlobalOffsets).map((offset, name) => DataRegion(name, BitVecLiteral(offset, 64))) // we should collect all data regions otherwise the ordering might be wrong var dataRgns: Set[DataRegion] = Set.empty diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala index 942595793..424898093 100644 --- a/src/main/scala/analysis/RegionInjector.scala +++ b/src/main/scala/analysis/RegionInjector.scala @@ -302,18 +302,14 @@ class RegionInjector(domain: mutable.Set[CFGPosition], def transformMemorySections(memorySegment: ArrayBuffer[MemorySection]): ArrayBuffer[MemorySection] = { val newArrayBuffer = ArrayBuffer.empty[MemorySection] - for (elem <- memorySegment) { - elem match { - case mem: MemorySection => - val regions = mmm.findDataObject(mem.address) - if (regions.size == 1) { - newArrayBuffer += MemorySection(regions.head.regionIdentifier, mem.address, mem.size, mem.bytes) 
- Logger.warn(s"Region ${regions.get.regionIdentifier} found for memory section ${mem.address}") - } else { - newArrayBuffer += mem - Logger.warn(s"No region found for memory section ${mem.address}") - } - case null => + for (mem <- memorySegment) { + val regions = mmm.findDataObject(mem.address) + if (regions.size == 1) { + newArrayBuffer += MemorySection(regions.head.regionIdentifier, mem.address, mem.size, mem.bytes) + Logger.warn(s"Region ${regions.get.regionIdentifier} found for memory section ${mem.address}") + } else { + newArrayBuffer += mem + Logger.warn(s"No region found for memory section ${mem.address}") } } newArrayBuffer diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 9136a4432..11d96afa0 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -34,7 +34,6 @@ import analysis.CfgCommandNode import scala.annotation.tailrec import scala.collection.mutable -import scala.sys.exit /** This file contains the main program execution. See RunUtils.loadAndTranslate for the high-level process. */ From 89cf1c7e5e73c504c22336a8462b3a6dd2c09eda Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Thu, 22 Aug 2024 16:24:59 +1000 Subject: [PATCH 030/104] Initial VSA work --- src/main/scala/analysis/AVLTree.scala | 179 ++++ src/main/scala/analysis/ActualVSA.scala | 752 +++++++++------- src/main/scala/analysis/ActualVSAold.scala | 60 ++ src/main/scala/analysis/Analysis.scala | 6 +- src/main/scala/analysis/BACKUPLAttice | 834 ++++++++++++++++++ src/main/scala/analysis/BitVectorEval.scala | 18 +- .../InterprocSteensgaardAnalysis.scala | 50 +- src/main/scala/analysis/LAST_VSA_BACKUP.scala | 276 ++++++ src/main/scala/analysis/Lattice.scala | 791 ++++++++++++++++- .../scala/analysis/MemoryRegionAnalysis.scala | 167 ++-- .../ReachingDefinitionsAnalysis.scala | 16 +- .../scala/analysis/RegToMemAnalysis.scala | 18 +- src/main/scala/analysis/UtilMethods.scala | 32 +- .../analysis/solvers/AbstractSPAnalysis.scala | 95 ++ src/main/scala/util/RunUtils.scala | 67 +- 15 files changed, 2884 insertions(+), 477 deletions(-) create mode 100644 src/main/scala/analysis/AVLTree.scala create mode 100644 src/main/scala/analysis/ActualVSAold.scala create mode 100644 src/main/scala/analysis/BACKUPLAttice create mode 100644 src/main/scala/analysis/LAST_VSA_BACKUP.scala create mode 100644 src/main/scala/analysis/solvers/AbstractSPAnalysis.scala diff --git a/src/main/scala/analysis/AVLTree.scala b/src/main/scala/analysis/AVLTree.scala new file mode 100644 index 000000000..ef65475a0 --- /dev/null +++ b/src/main/scala/analysis/AVLTree.scala @@ -0,0 +1,179 @@ +package analysis + +/** + * Node of the AVL tree. + * @param key + * @param value + * @param height + * @param left + * @param right + * @tparam K key type + * @tparam V value type + */ +case class Node[K, V](var key: K, var value: V, var height: Int, var left: Option[Node[K, V]], var right: Option[Node[K, V]]) + +/** + * AVL tree implementation. Ref. 
https://cs.indstate.edu/~kbalaraman/anew.pdf + * @param ordering + * @tparam K key type + * @tparam V value type + */ +class AVLTree[K, V](ordering: Ordering[K]) { + private var root: Option[Node[K, V]] = None + + // Get the height of the node + private def height(node: Option[Node[K, V]]): Int = node.map(_.height).getOrElse(0) + + // Rotate right + private def rotateRight(y: Node[K, V]): Node[K, V] = { + val x = y.left.get + val T2 = x.right + x.right = Some(y) + y.left = T2 + y.height = Math.max(height(y.left), height(y.right)) + 1 + x.height = Math.max(height(x.left), height(x.right)) + 1 + x + } + + // Rotate left + private def rotateLeft(x: Node[K, V]): Node[K, V] = { + val y = x.right.get + val T2 = y.left + y.left = Some(x) + x.right = T2 + x.height = Math.max(height(x.left), height(x.right)) + 1 + y.height = Math.max(height(y.left), height(y.right)) + 1 + y + } + + // Get balance factor of node N + private def getBalance(node: Option[Node[K, V]]): Int = node.map(n => height(n.left) - height(n.right)).getOrElse(0) + + // Insert a key-value pair + def insert(key: K, value: V): Unit = { + def insertNode(node: Option[Node[K, V]], key: K, value: V): Node[K, V] = { + if (node.isEmpty) return Node(key, value, 1, None, None) + + val n = node.get + + if (ordering.lt(key, n.key)) n.left = Some(insertNode(n.left, key, value)) + else if (ordering.gt(key, n.key)) n.right = Some(insertNode(n.right, key, value)) + else { + n.value = value + return n + } + + n.height = 1 + Math.max(height(n.left), height(n.right)) + val balance = getBalance(Some(n)) + + // Left Left Case + if (balance > 1 && ordering.lt(key, n.left.get.key)) return rotateRight(n) + + // Right Right Case + if (balance < -1 && ordering.gt(key, n.right.get.key)) return rotateLeft(n) + + // Left Right Case + if (balance > 1 && ordering.gt(key, n.left.get.key)) { + n.left = Some(rotateLeft(n.left.get)) + return rotateRight(n) + } + + // Right Left Case + if (balance < -1 && ordering.lt(key, n.right.get.key)) { + n.right = Some(rotateRight(n.right.get)) + return rotateLeft(n) + } + + n + } + + root = Some(insertNode(root, key, value)) + } + + // Search for a value by key + def search(key: K): Option[V] = { + def searchNode(node: Option[Node[K, V]], key: K): Option[V] = { + if (node.isEmpty) return None + + val n = node.get + + if (ordering.equiv(key, n.key)) Some(n.value) + else if (ordering.lt(key, n.key)) searchNode(n.left, key) + else searchNode(n.right, key) + } + + searchNode(root, key) + } + + // Delete a key-value pair + def delete(key: K): Unit = { + def minValueNode(node: Node[K, V]): Node[K, V] = { + var current = node + while (current.left.isDefined) current = current.left.get + current + } + + def deleteNode(node: Option[Node[K, V]], key: K): Option[Node[K, V]] = { + if (node.isEmpty) return None + + val n = node.get + + if (ordering.lt(key, n.key)) n.left = deleteNode(n.left, key) + else if (ordering.gt(key, n.key)) n.right = deleteNode(n.right, key) + else { + if (n.left.isEmpty || n.right.isEmpty) { + val temp = if (n.left.isDefined) n.left else n.right + if (temp.isEmpty) return None + else return temp + } else { + val temp = minValueNode(n.right.get) + n.key = temp.key + n.value = temp.value + n.right = deleteNode(n.right, temp.key) + } + } + + n.height = Math.max(height(n.left), height(n.right)) + 1 + val balance = getBalance(Some(n)) + + // Left Left Case + if (balance > 1 && getBalance(n.left) >= 0) return Some(rotateRight(n)) + + // Left Right Case + if (balance > 1 && getBalance(n.left) < 0) { + n.left = 
Some(rotateLeft(n.left.get)) + return Some(rotateRight(n)) + } + + // Right Right Case + if (balance < -1 && getBalance(n.right) <= 0) return Some(rotateLeft(n)) + + // Right Left Case + if (balance < -1 && getBalance(n.right) > 0) { + n.right = Some(rotateRight(n.right.get)) + return Some(rotateLeft(n)) + } + + Some(n) + } + + root = deleteNode(root, key) + } +} + +// Example usage +object AVLTreeExample extends App { + val avl = new AVLTree[Int, String](Ordering.Int) + avl.insert(10, "Value10") + avl.insert(20, "Value20") + avl.insert(30, "Value30") + avl.insert(40, "Value40") + avl.insert(50, "Value50") + avl.insert(25, "Value25") + + println(avl.search(25)) // Some(Value25) + println(avl.search(100)) // None + + avl.delete(25) + println(avl.search(25)) // None +} diff --git a/src/main/scala/analysis/ActualVSA.scala b/src/main/scala/analysis/ActualVSA.scala index 9954f04a3..2a7b2dac4 100644 --- a/src/main/scala/analysis/ActualVSA.scala +++ b/src/main/scala/analysis/ActualVSA.scala @@ -1,336 +1,416 @@ -package analysis -import ir._ -import util._ -import scala.collection.mutable -import analysis.BitVectorEval._ - -class ActualVSA(program: Program, - constantPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], - mmm: MemoryModelMap) { - - enum Flag { - case CF // Carry Flag - case ZF // Zero Flag - case SF // Sign Flag - case PF // Parity Flag - case AF // Auxiliary Flag - case OF // Overflow Flag - } - - enum Bool3 { - case True - case False - case Maybe - } - - /** - * SI class that represents a strided interval - * s is the stride - * l is the lower bound - * u is the upper bound - * [l, u] is the interval - * [l, u] \ s is the set of values - * 0[l,l] represents the singleton set {l} - */ - case class StridedInterval(s: BitVecLiteral, lb: BitVecLiteral, ub: BitVecLiteral) { - require(smt_bvule(lb, ub) == TrueLiteral, "Lower bound must be less than or equal to upper bound") - - // Meaning of a strided interval - def gamma: Set[BitVecLiteral] = { - smt_interval(lb, ub, s) - } - - override def toString: String = { - s"$s[$lb, $ub]" - } - - // Addition - def +(that: StridedInterval): StridedInterval = { - val newLb = smt_bvadd(this.lb, that.lb) - val newUb = smt_bvadd(this.ub, that.ub) - val newS = gcd(this.s, that.s) - StridedInterval(newS, newLb, newUb) - } - - // Bitwise NOT - def unary_~ : StridedInterval = { - StridedInterval(s, smt_bvnot(ub), smt_bvnot(lb)) - } - - // Bitwise AND - def &(that: StridedInterval): StridedInterval = { - val lbAnd = smt_bvand(this.lb, that.lb) - val ubAnd = smt_bvand(this.ub, that.ub) - StridedInterval(gcd(this.s, that.s), lbAnd, ubAnd) - } - - // Intersection of two strided intervals - def intersect(that: StridedInterval): Option[StridedInterval] = { - val newLb = smt_max(this.lb, that.lb) - val newUb = smt_min(this.ub, that.ub) - val newS = smt_gcd(this.s, that.s) - if (smt_bvule(newLb, newUb) == TrueLiteral) Some(StridedInterval(newS, newLb, newUb)) else None - } - - // join of two or more strided intervals - def join(that: StridedInterval): StridedInterval = { - val newLb = smt_min(this.lb, that.lb) - val newUb = smt_max(this.ub, that.ub) - val newS = gcd(this.s, that.s) - StridedInterval(newS, newLb, newUb) - } - - // Helper function to compute the greatest common divisor - private def gcd(a: BitVecLiteral, b: BitVecLiteral): BitVecLiteral = { - if (b.value == 0) a else gcd(b, smt_bvsmod(a, b)) - } - } - - /** - * A single value 
set is a map from regions to strided intervals - * @param intervals - */ - case class ValueSet(intervals: Set[StridedInterval]) { - - def gamma: Set[BitVecLiteral] = { - intervals.flatMap(_.gamma) - } - - // Union of two value sets - def union(that: ValueSet): ValueSet = { - ValueSet(this.intervals ++ that.intervals) - } - - // Intersection of two value sets - def intersect(that: ValueSet): ValueSet = { - val newIntervals = for { - a <- this.intervals - b <- that.intervals - inter = a.intersect(b) if inter.isDefined - } yield inter.get - ValueSet(newIntervals) - } - - // Addition of value sets - def +(that: ValueSet): ValueSet = { - val newIntervals = for { - a <- this.intervals - b <- that.intervals - } yield a + b - ValueSet(newIntervals) - } - - // Addition of a constant to a value set - def +(c: BitVecLiteral): ValueSet = { - val newIntervals = for { - a <- this.intervals - } yield StridedInterval(a.s, smt_bvadd(a.lb, c), smt_bvadd(a.ub, c)) // TODO: Should Stride change? - ValueSet(newIntervals) - } - } - - // top element of the lattice - private object ValueSetLattice { - val TOP: ValueSet = ValueSet(Set(StridedInterval(BitVecLiteral(BigInt(1), 64), BitVecLiteral(BigInt(0), 64), BitVecLiteral(BigInt(Long.MaxValue), 64)))) - val BOTTOM: ValueSet = ValueSet(mmm.getAllRegions.map(r => Set())) // TODO: should be all regions mapped to empty set - } - - - case class AlocEnv(allocs: Set[MemoryRegion]) { - def join(that: AlocEnv): AlocEnv = { - AlocEnv(this.allocs ++ that.allocs) - } - } - //private type AbsEnv = mutable.Map[Variable | MemoryRegion, ValueSet] | mutable.Map[MemoryRegion, AlocEnv] | mutable.Map[Flag, Bool3] - //private type AbsEnv = mutable.Map[Variable | MemoryRegion | Flag, ValueSet | AlocEnv | Bool3] - case class AbsEnv( - env1: mutable.Map[Variable, ValueSet], - env2: mutable.Map[MemoryRegion, AlocEnv], - env3: mutable.Map[Flag, Bool3] - ): - def join(that: AbsEnv): AbsEnv = { - AbsEnv( - env1 ++ that.env1, - env2 ++ that.env2, - env3 ++ that.env3 - ) - } - - /** - * ∗(vs, s): Returns a pair of sets (F, P). F represents the set of “fully accessed” a-locs: it - * consists of the a-locs that are of size s and whose starting addresses are in vs. P represents - * the set of “partially accessed” a-locs: it consists of (i) a-locs whose starting addresses are in - * vs but are not of size s, and (ii) a-locs whose addresses are in vs but whose starting addresses - * and sizes do not meet the conditions to be in F. 
[Reference VSA paper] - * - * @param vsR2 - * @param s - * @return - */ - private def dereference(vsR2: ValueSet, s: BigInt): (Set[MemoryRegion], Set[MemoryRegion]) = { - // TODO: Global memory size can be retrieved from the symbol table and are of size s - // Map addresses to exact memory locations - val fullyAccessedLocations = vsR2.gamma.flatMap(address => mmm.findStackFullAccessesOnly(address.value, s)) - - // Identify partially accessed locations (if any) - val partiallyAccessedLocations = vsR2.gamma.flatMap(address => mmm.findStackPartialAccessesOnly(address.value, s)) - - // Return the set of fully accessed locations and the set of partially accessed locations - (fullyAccessedLocations.diff(partiallyAccessedLocations).asInstanceOf[Set[MemoryRegion]], partiallyAccessedLocations.asInstanceOf[Set[MemoryRegion]]) - } - - private def RemoveLowerBounds(vs: ValueSet): ValueSet = { - val newIntervals = for { - a <- vs.intervals - } yield StridedInterval(a.s, BitVecLiteral(BigInt(0), a.ub.size), a.ub) - ValueSet(newIntervals) - } - - private def RemoveUpperBounds(vs: ValueSet): ValueSet = { - val newIntervals = for { - a <- vs.intervals - } yield StridedInterval(a.s, a.lb, BitVecLiteral(BigInt(Long.MaxValue), a.lb.size)) - ValueSet(newIntervals) - } - - private def joinValueSets(vs1: ValueSet, vs2: ValueSet): ValueSet = { - vs1.union(vs2) - } - - private def meetValueSets(vs1: ValueSet, vs2: ValueSet): ValueSet = { - vs1.intersect(vs2) - } - - def AbstractTransformer(in: AbsEnv, instruction: CFGPosition): AbsEnv = { - instruction match { - case p: Procedure => in - case b: Block => in - case c: Command => - c match - case statement: Statement => - statement match - case localAssign: LocalAssign => - localAssign.rhs match - case binOp: BinaryExpr => - if (binOp.arg1.isInstanceOf[Variable]) { - val R1 = localAssign.lhs - val R2 = binOp.arg1.asInstanceOf[Variable] - val c = evaluateExpression(binOp.arg2, constantPropResult(instruction)) - - // R1 = R2 + c - val out = in - val vs_R2: ValueSet = in.env1.getOrElseUpdate(R2, ValueSetLattice.BOTTOM) - out.env1(R1) = vs_R2 + c.get - out - } else { - in - } - case memoryLoad: MemoryLoad => - memoryLoad.index match - case binOp: BinaryExpr => - if (binOp.arg2.isInstanceOf[Variable]) { - val R1 = localAssign.lhs - val R2 = binOp.arg1.asInstanceOf[Variable] // TODO: Is R2 always a variable? 
- val out = in - getDefinition(binOp.arg2.asInstanceOf[Variable], instruction, reachingDefs).foreach { - d => - d.rhs match - case binOp2: BinaryExpr => - val c1 = evaluateExpression(binOp2.arg1, constantPropResult(instruction)) - val c2 = evaluateExpression(binOp2.arg2, constantPropResult(instruction)) - // R1 = *(R2 + c1) + c2 - val vs_R2: ValueSet = in.env1(R2) - val s = memoryLoad.size // s is the size of dereference performed by the instruction - val (f: Set[MemoryRegion], p: Set[MemoryRegion]) = dereference(vs_R2 + c1.get, BigInt(s)) - println("VSA") - println(f) - if (p.isEmpty) { - val vs_rhs = f.map(in.env1(_)).reduce(joinValueSets) - out.env1(R1) = vs_rhs + c2.get - } else { - out.env1(R1) = ValueSetLattice.TOP - } - case _ => - } - out - } else { - in - } - case _ => in // TODO: Handle other cases - case variable: Variable => - val R1 = localAssign.lhs - val R2 = variable - // R1 >= R2 - val out = in - val vs_R1 = in.env1.getOrElseUpdate(R1, ValueSetLattice.BOTTOM) - val vs_R2 = in.env1(R2) - val vs_lb = RemoveUpperBounds(vs_R2) - val vs_ub = RemoveLowerBounds(vs_R1) - out.env1(R1) = vs_R1.intersect(vs_lb) - out.env1(R2) = vs_R2.intersect(vs_ub) - out - case bitVecLiteral: BitVecLiteral => - val R1 = localAssign.lhs - val c = bitVecLiteral - // R1 <= c - val vs_c = ValueSet(Set(StridedInterval(smt_gcd(BitVecLiteral(BigInt(0), c.size), c), BitVecLiteral(BigInt(0), c.size), c))) // TODO: Fix ME - val out = in - out.env1(R1) = meetValueSets(in.env1(R1), vs_c) - out - case _ => in // TODO: Handle other cases - case memoryAssign: MemoryAssign => in // TODO: *(R1 + c1) = R2 + c2 - case nop: NOP => in - case assert: Assert => in - case assume: Assume => in - case jump: Jump => in - } - } - - def IntraProceduralVSA(): mutable.Map[CFGPosition, AbsEnv] = { - val worklist = new mutable.Queue[CFGPosition]() - worklist.enqueue(program.mainProcedure) - val allStackRegions: Set[StackRegion] = mmm.getAllStackRegions() - val allDataRegions: Set[DataRegion] = mmm.getAllDataRegions() - val allHeapRegions: Set[HeapRegion] = mmm.getAllHeapRegions() - - val allocatedStackRegions = AlocEnv(allStackRegions) - val allocatedDataRegions = AlocEnv(allDataRegions) - val allocatedHeapRegions = AlocEnv(allHeapRegions) - - val stackManyToOne = allStackRegions.map(r => r -> allocatedStackRegions).toMap - val dataManyToOne = allDataRegions.map(r => r -> allocatedDataRegions).toMap - val heapManyToOne = allHeapRegions.map(r => r -> allocatedHeapRegions).toMap - - val combinedMap = stackManyToOne ++ dataManyToOne ++ heapManyToOne - val flagsToMaybe = Flag.values.map(f => f -> Bool3.Maybe).toMap - - val absEnv_enter = AbsEnv(mutable.Map().withDefault(_ => ValueSetLattice.BOTTOM), mutable.Map() ++ combinedMap, mutable.Map() ++ flagsToMaybe) - val abstractStates = mutable.Map[CFGPosition, AbsEnv](worklist.head -> absEnv_enter) - while(worklist.nonEmpty) { - val n: CFGPosition = worklist.dequeue() - val m = IntraProcIRCursor.succ(n) - for (succ <- m) { - mmm.popContext() - mmm.pushContext(IRWalk.procedure(n).name) - val edge_amc = AbstractTransformer(abstractStates(n), succ) - Propagate(succ, edge_amc) - } - } - - def Propagate(n: CFGPosition, edge_amc: AbsEnv): Unit = { - if (!abstractStates.contains(n)) { - abstractStates(n) = edge_amc - worklist.enqueue(n) - } else { - val oldEnv = abstractStates(n) - val newEnv = oldEnv.join(edge_amc) - if (newEnv != oldEnv) { - abstractStates(n) = newEnv - worklist.enqueue(n) - } - } - } - abstractStates - } -} +//package analysis +//import ir.* +//import util.* +// +//import 
scala.collection.mutable +//import analysis.BitVectorEval.* +//import analysis.* +// +//class ActualVSA(program: Program, +// constantPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], +// reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], +// mmm: MemoryModelMap) { +// +// // TODO: This assumes no function is called Data or Heap (should be a tuple instead) +// val DATA_REGION_NAME = "Data" +// val HEAP_REGION_NAME = "Heap" +// +// val valueSetLattice: ValueSetLattice[MemRgn] = ValueSetLattice() +// val flagLattice: FlagLattice = FlagLattice() +// +// type MemRgn = String // all record titles +// type aaloc = MemoryRegion +// +// val MEMORY_REGIONS: List[MemRgn] = (Set(DATA_REGION_NAME, HEAP_REGION_NAME) ++ mmm.getAllocsPerProcedure.keySet).toList.sorted +// val ALLOCS: Map[String, Set[aaloc]] = mmm.getAllocsPerProcedure.asInstanceOf[Map[String, Set[aaloc]]] ++ Map("Data" -> mmm.getAllDataRegions.asInstanceOf[Set[aaloc]], "Heap" -> mmm.getAllHeapRegions.asInstanceOf[Set[aaloc]]) +// +// +// private val stackPointer = Register("R31", BitVecType(64)) +//// /** +//// * ∗(vs, s): Returns a pair of sets (F, P). F represents the set of “fully accessed” a-locs: it +//// * consists of the a-locs that are of size s and whose starting addresses are in vs. P represents +//// * the set of “partially accessed” a-locs: it consists of (i) a-locs whose starting addresses are in +//// * vs but are not of size s, and (ii) a-locs whose addresses are in vs but whose starting addresses +//// * and sizes do not meet the conditions to be in F. [Reference VSA paper] +//// * +//// * @param vsR2 +//// * @param s size of the dereference +//// * @return +//// */ +//// def dereference(s: BigInt): (Set[MemoryRegion], Set[MemoryRegion]) = { +//// // TODO: Global memory size can be retrieved from the symbol table and are of size s +//// // Map addresses to exact memory locations +//// val fullyAccessedLocations = stridedInterval.gamma.flatMap(address => mmm.findStackFullAccessesOnly(address.value, s)) +//// +//// // Identify partially accessed locations (if any) +//// val partiallyAccessedLocations = stridedInterval.gamma.flatMap(address => mmm.findStackPartialAccessesOnly(address.value, s)) +//// +//// // Return the set of fully accessed locations and the set of partially accessed locations +//// (fullyAccessedLocations.diff(partiallyAccessedLocations).asInstanceOf[Set[MemoryRegion]], partiallyAccessedLocations.asInstanceOf[Set[MemoryRegion]]) +//// } +//// } +// +// /** +// * Allocs Structure +// * Procedures +// * main -> {alloc1, alloc2, alloc3} +// * foo -> {alloc4, alloc5} +// * Data +// * Data -> {alloc6, alloc7} +// * Heap +// * Heap -> {alloc8, alloc9} +// */ +// case class AbsEnv(): +// var regEnv: mutable.Map[Variable, ValueSet[MemRgn]] = mutable.Map[Variable, ValueSet[MemRgn]]().withDefault(_ => valueSetLattice.bottom) +// var alocEnv: mutable.Map[aaloc, ValueSet[MemRgn]] = mutable.Map[aaloc, ValueSet[MemRgn]]().withDefault(_ => valueSetLattice.bottom) +// var flagEnv: Flag = FlagMap(Map[Flags, Bool3]()) +// +// def join(absEnv: AbsEnv): AbsEnv = { +// val out = AbsEnv() +// // unify regs +// absEnv.regEnv.foreach((k, v) => +// if (regEnv.contains(k)) { +// out.regEnv(k) = valueSetLattice.lub(regEnv(k), v) +// } else { +// out.regEnv(k) = v +// }) +// // unify alocs +// absEnv.alocEnv.foreach((k, v) => +// if (alocEnv.contains(k)) { +// out.alocEnv(k) = valueSetLattice.lub(alocEnv(k), v) +// } else { +// out.alocEnv(k) = v +// }) +// // 
unify flags +// out.flagEnv = flagLattice.lub(flagEnv, absEnv.flagEnv) +// out +// } +// +// override def toString: String = { +// val regEnvStr = regEnv.map((k, v) => s"$k -> $v").mkString("\n") +// val alocEnvStr = alocEnv.map((k, v) => s"$k -> $v").mkString("\n") +// val flagEnvStr = flagEnv.toString +// s"RegEnv:\n$regEnvStr\nAlocEnv:\n$alocEnvStr\nFlagEnv:\n$flagEnvStr" +// } +// +// // TODO: This is not very accurate and would need a better pattern matching +// def exprToRegion(expr: Expr, n: CFGPosition): Option[MemoryRegion] = { +// expr match { +// case binOp: BinaryExpr if binOp.arg1 == stackPointer => +// evaluateExpression(binOp.arg2, constantPropResult(n)) match { +// case Some(b: BitVecLiteral) => mmm.findStackObject(b.value) +// case None => None +// } +// case _ => +// evaluateExpression(expr, constantPropResult(n)) match { +// case Some(b: BitVecLiteral) => mmm.findDataObject(b.value) +// case None => None +// } +// } +// } +// +// def evaluateValueSet(expr: Expr, absEnv: AbsEnv, n: CFGPosition): ValueSet[MemRgn] = { +// expr match +// case literal: Literal => +// literal match +// case lit: BoolLit => ??? +// case BitVecLiteral(value, size) => +// val si = valueSetLattice.lattice.singletonSI(value, size) +// val memoryRegionMap = MEMORY_REGIONS.map(i => i -> si).toMap +// VS(memoryRegionMap) +// case IntLiteral(value) => ??? +// case Extract(end, start, body) => ??? +// case Repeat(repeats, body) => ??? +// case ZeroExtend(extension, body) => ??? +// case SignExtend(extension, body) => ??? +// case UnaryExpr(op, arg) => +// arg match { +// case v1: Variable => +// val VS_v1 = absEnv.regEnv(v1) +// valueSetLattice.applyOp(op, VS_v1) +// case _ => valueSetLattice.applyOp(op, evaluateValueSet(arg, absEnv, n)) +// } +// case BinaryExpr(op, arg1, arg2) => +// (arg1, arg2) match { +// case (v1: Variable, v2: Variable) => +// val VS_v1 = absEnv.regEnv(v1) +// val VS_v2 = absEnv.regEnv(v2) +// valueSetLattice.applyOp(op, VS_v1, Left(VS_v2)) +// case (v1: Variable, c: BitVecLiteral) => +// val VS_v1 = absEnv.regEnv(v1) +// valueSetLattice.applyOp(op, VS_v1, Right(c)) +// case (c: BitVecLiteral, v1: Variable) => +// val VS_v1 = absEnv.regEnv(v1) +// valueSetLattice.applyOp(op, VS_v1, Right(c)) +// case _ => +// val VS_arg1 = evaluateValueSet(arg1, absEnv, n) +// val VS_arg2 = evaluateValueSet(arg2, absEnv, n) +// valueSetLattice.applyOp(op, VS_arg1, Left(VS_arg2)) +// } +// case MemoryStore(mem, index, value, endian, size) => ??? +// case MemoryLoad(mem, index, endian, size) => +// val region = exprToRegion(index, n) +// if (region.isDefined) { +// absEnv.alocEnv(region.get) +// } else { +// valueSetLattice.bottom +// } +// case Memory(name, addressSize, valueSize) => ??? +// case variable: Variable => absEnv.regEnv(variable) +// } +// +// /** Default implementation of eval. 
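+//   * (sketch only: resolves load/store addresses to memory regions via exprToRegion and the
+//   * constant-propagation result, then updates the matching register / a-loc value sets)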
+// */ +// def eval(cmd: Command, s: AbsEnv, n: CFGPosition): Map[Variable | MemoryRegion, Set[Value]] = { +// Logger.debug(s"eval: $cmd") +// Logger.debug(s"state: $s") +// Logger.debug(s"node: $n") +// cmd match +// case localAssign: LocalAssign => +// localAssign.rhs match +// case memoryLoad: MemoryLoad => +// exprToRegion(memoryLoad.index, n) match +// case Some(r: MemoryRegion) => +// // this is an exception to the rule and only applies to data regions +// evaluateExpression(memoryLoad.index, constantPropResult(n)) match +// case Some(bitVecLiteral: BitVecLiteral) => +// m = m + (r -> Set(getValueType(bitVecLiteral))) +// m = m + (localAssign.lhs -> m(r)) +// m +// +// val vs_r1 = s.regEnv(localAssign.lhs) +// val singleton = Set(getValueType(bitVecLiteral)) +// +// valueSetLattice.lub() +// +// case None => +// m = m + (localAssign.lhs -> m(r)) +// m +// +// +// case None => +// Logger.warn("could not find region for " + localAssign) +// m +// case e: Expr => +// evaluateExpression(e, constantPropResult(n)) match { +// case Some(bv: BitVecLiteral) => +// m = m + (localAssign.lhs -> Set(getValueType(bv))) +// m +// case None => +// Logger.warn("could not evaluate expression" + e) +// m +// } +// case memAssign: MemoryAssign => +// memAssign.rhs.index match +// case binOp: BinaryExpr => +// val region: Option[MemoryRegion] = exprToRegion(binOp, n) +// region match +// case Some(r: MemoryRegion) => +// val storeValue = memAssign.rhs.value +// evaluateExpression(storeValue, constantPropResult(n)) match +// case Some(bitVecLiteral: BitVecLiteral) => +// m = m + (r -> Set(getValueType(bitVecLiteral))) +// m +// /* +// // TODO constant prop returned BOT OR TOP. Merge regions because RHS could be a memory loaded address +// case variable: Variable => +// s + (r -> s(variable)) +// */ +// case None => +// storeValue.match { +// case v: Variable => +// m = m + (r -> m(v)) +// m +// case _ => +// Logger.warn(s"Too Complex: $storeValue") // do nothing +// m +// } +// case None => +// Logger.warn("could not find region for " + memAssign) +// m +// case _ => +// m +// case _ => +// m +// } +// +// def AbstractTransformer(in: AbsEnv, n: CFGPosition): AbsEnv = { +// if (IRWalk.procedure(n) == n) { +// mmm.pushContext(n.asInstanceOf[Procedure].name) +// in +// } else if (IRWalk.procedure(n).end == n) { +// mmm.popContext() +// in +// } else n match +// case command: Command => +// eval(command, in, n) +// case _ => +// in +//// instruction match { +//// case p: Procedure => in +//// case b: Block => in +//// case c: Command => +//// c match +//// case statement: Statement => +//// statement match +//// case localAssign: LocalAssign => +//// localAssign.rhs match +//// case binOp: BinaryExpr => +//// if (binOp.arg1.isInstanceOf[Variable]) { +//// val R1 = localAssign.lhs +//// val R2 = binOp.arg1.asInstanceOf[Variable] +//// val c = evaluateExpression(binOp.arg2, constantPropResult(instruction)) +//// if (c.isDefined) { +//// +//// // R1 = R2 + c +//// val out = in +//// val vs_R2: ValueSet[MemRgn] = in.regEnv(R2) +//// out.regEnv(R1) = valueSetLattice.add(vs_R2, c.get) +//// return out +//// } +//// } +//// in +//// case memoryLoad: MemoryLoad => +//// memoryLoad.index match +//// case binOp: BinaryExpr => +//// if (binOp.arg2.isInstanceOf[Variable]) { +//// val R1 = localAssign.lhs +//// val R2 = binOp.arg1.asInstanceOf[Variable] // TODO: Is R2 always a variable? 
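+////                          // VSA load rule R1 = *(R2 + c1) + c2: dereference the value set of R2 into fully (f)
+////                          // and partially (p) accessed a-locs; if p is empty, join the value sets of f and add
+////                          // c2, otherwise fall back to TOP (see the code below)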
+//// val out = in +//// getDefinition(binOp.arg2.asInstanceOf[Variable], instruction, reachingDefs).foreach { +//// d => +//// d.rhs match +//// case binOp2: BinaryExpr => +//// val c1 = evaluateExpression(binOp2.arg1, constantPropResult(instruction)) +//// val c2 = evaluateExpression(binOp2.arg2, constantPropResult(instruction)) +//// // R1 = *(R2 + c1) + c2 +//// val vs_R2: ValueSet[String] = in.regEnv(R2) +//// val s = memoryLoad.size // s is the size of dereference performed by the instruction +//// val (f: Set[MemoryRegion], p: Set[MemoryRegion]) = valueSetLattice.dereference(BigInt(s), vs_R2, mmm) +//// if (p.isEmpty) { +//// val vs_rhs = f.map(r => in.alocEnv(r)).foldLeft(valueSetLattice.bottom)((a, b) => valueSetLattice.lub(a, b)) +//// out.regEnv(R1) = valueSetLattice.add(vs_rhs, c2.get) +//// } else { +//// out.regEnv(R1) = valueSetLattice.top +//// } +//// case _ => +//// } +//// out +//// } else { +//// in +//// } +//// case _ => in // TODO: Handle other cases +//// case variable: Variable => in +////// val R1 = localAssign.lhs +////// val R2 = variable +////// // R1 >= R2 +////// val out = in +////// val vs_R1 = in.env1.getOrElseUpdate(R1, ValueSetLattice.BOTTOM) +////// val vs_R2 = in.env1(R2) +////// val vs_lb = vs_R2.removeUpperBounds() +////// val vs_ub = vs_R1.removeLowerBounds() +////// out.env1(R1) = vs_R1.meet(vs_lb) +////// out.env1(R2) = vs_R2.meet(vs_ub) +////// out +//// case bitVecLiteral: BitVecLiteral => in +////// val R1 = localAssign.lhs +////// val c = bitVecLiteral +////// // R1 <= c +////// // from 0 to c, all value sets are possible (ie. stack, global) TODO: this may be wrong because of the _ join _? +////// val interval = bitVec_interval(BitVecLiteral(0, c.size), c, BitVecLiteral(1, c.size)) +////// val regions: mutable.Set[MemoryRegion] = mutable.Set() +////// println(c) +////// interval.foreach(v => +////// val dataObject = mmm.findDataObject(v.value) +////// if dataObject.isDefined then regions.add(dataObject.get) +////// ) +////// TOP_STRIDE.gamma.map(v => regions.add(mmm.findStackObject(v.value).get)) +////// +////// val allValueSets: mutable.Set[ValueSet] = mutable.Set() +////// regions.foreach(r => allValueSets.add(in.env2(r).getAAlloc(r).valueSet)) +////// val vs_c = allValueSets.fold(ValueSetLattice.BOTTOM)(_ join _) +////// val out = in +////// out.env1(R1) = in.env1(R1).meet(vs_c) +////// out +//// +////// val vs_c = ValueSet(Set(StridedInterval(smt_gcd(BitVecLiteral(BigInt(0), c.size), c), BitVecLiteral(BigInt(0), c.size), c))) // TODO: Fix ME +////// val out = in +////// out.env1(R1) = in.env1(R1).meet(vs_c) +////// out +//// case _ => in // TODO: Handle other cases +//// case memoryAssign: MemoryAssign => +//// val out = in +//// // TODO: *(R1 + c1) = R2 + c2 +//// memoryAssign.rhs.index match { +//// case binaryExpr: BinaryExpr => +//// binaryExpr.arg2 match { +//// case bitVecLiteral: BitVecLiteral => +//// memoryAssign.rhs.value match { +//// case binaryExprRHS: BinaryExpr => +//// binaryExprRHS.arg2 match { +//// case bitVecLiteralRHS: BitVecLiteral => +//// val R1 = binaryExpr.arg1.asInstanceOf[Variable] +//// val c1 = bitVecLiteral +//// val R2 = binaryExprRHS.arg1.asInstanceOf[Variable] +//// val c2 = bitVecLiteralRHS +//// +//// val vs_R1: ValueSet[MemRgn] = in.regEnv(R1) +//// val vs_R2: ValueSet[MemRgn] = in.regEnv(R2) +//// val proc: Procedure = IRWalk.procedure(instruction) +//// val (f: Set[MemoryRegion], p: Set[MemoryRegion]) = valueSetLattice.dereference(BigInt(memoryAssign.lhs.valueSize), valueSetLattice.add(vs_R1, c1), mmm) 
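+////                          // a strong update (overwrite of the a-loc) is only sound when exactly one a-loc is
+////                          // fully accessed; otherwise each candidate is weakly updated by joining in vs(R2) + c2,
+////                          // and partially accessed a-locs are sent to TOP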
+//// +//// if (f.size == 1 && p.size == 0) { // TODO: must check if f has no heap or recursive proc aalocs +//// out.alocEnv(f.head) = valueSetLattice.add(vs_R2, c2) // strong update +//// } else { +//// f.foreach(v => out.alocEnv(v) = valueSetLattice.lub(out.alocEnv(v), valueSetLattice.add(vs_R2, c2))) // weak update +//// } +//// p.foreach(v => out.alocEnv(v) = valueSetLattice.top) // Set partial accesses to top +//// case _ => +//// } +//// case _ => +//// } +//// case _ => // TODO: Should we evaluate here? +//// } +//// case _ => // // TODO: Should we evaluate here? +//// } +//// out +//// case nop: NOP => in +//// case assert: Assert => in +//// case assume: Assume => in +//// case jump: Jump => in +//// } +// } +// +// def IntraProceduralVSA(): mutable.Map[CFGPosition, AbsEnv] = { +// val worklist = new mutable.Queue[CFGPosition]() +// worklist.enqueue(program.mainProcedure) +// +// val absEnv_enter = AbsEnv() +// val abstractStates = mutable.Map[CFGPosition, AbsEnv](worklist.head -> absEnv_enter) +// while(worklist.nonEmpty) { +// val n: CFGPosition = worklist.dequeue() +// val m = IntraProcIRCursor.succ(n) +// for (succ <- m) { +// mmm.popContext() +// mmm.pushContext(IRWalk.procedure(n).name) +// val edge_amc = AbstractTransformer(abstractStates(n), succ) +// Propagate(succ, edge_amc) +// } +// } +// +// def Propagate(n: CFGPosition, edge_amc: AbsEnv): Unit = { +// if (!abstractStates.contains(n)) { +// abstractStates(n) = edge_amc +// worklist.enqueue(n) +// } else { +// val oldEnv = abstractStates(n) +// val newEnv = oldEnv.join(edge_amc) +// if (newEnv != oldEnv) { +// abstractStates(n) = newEnv +// worklist.enqueue(n) +// } +// } +// } +// abstractStates +// } +//} diff --git a/src/main/scala/analysis/ActualVSAold.scala b/src/main/scala/analysis/ActualVSAold.scala new file mode 100644 index 000000000..2de052c1c --- /dev/null +++ b/src/main/scala/analysis/ActualVSAold.scala @@ -0,0 +1,60 @@ +//package analysis +// +//import ir.* +//import analysis.solvers._ +// +//import scala.collection.immutable +// +//trait ActualVSA(program: Program) { +// +// val powersetLattice: PowersetLattice[Variable] = PowersetLattice() +// +// val lattice: MapLattice[CFGPosition, Set[Variable], PowersetLattice[Variable]] = MapLattice(powersetLattice) +// +// val domain: Set[CFGPosition] = Set.empty ++ program +// +// private val stackPointer = Register("R31", BitVecType(64)) +// private val linkRegister = Register("R30", BitVecType(64)) +// private val framePointer = Register("R29", BitVecType(64)) +// +// private val ignoreRegions: Set[Expr] = Set(linkRegister, framePointer, stackPointer) +// +// /** Default implementation of eval. +// */ +// def eval(cmd: Command, s: Set[Variable]): Set[Variable] = { +// var m = s +// cmd match { +// case assume: Assume => +// m.diff(assume.body.variables) +// case assert: Assert => +// m.diff(assert.body.variables) +// case memoryAssign: MemoryAssign => +// m.diff(memoryAssign.lhs.variables ++ memoryAssign.rhs.variables) +// case indirectCall: IndirectCall => +// m - indirectCall.target +// case localAssign: LocalAssign => +// m = m.diff(localAssign.rhs.variables) +// if ignoreRegions.contains(localAssign.lhs) then m else m + localAssign.lhs +// case _ => +// m +// } +// } +// +// /** Transfer function for state lattice elements. 
+// */ +// def localTransfer(n: CFGPosition, s: Set[Variable]): Set[Variable] = n match { +// case cmd: Command => +// eval(cmd, s) +// case _ => s // ignore other kinds of nodes +// } +// +// /** Transfer function for state lattice elements. +// */ +// def transfer(n: CFGPosition, s: Set[Variable]): Set[Variable] = localTransfer(n, s) +//} +// +//class ANRAnalysisSolver(program: Program) extends ANRAnalysis(program) +// with IRIntraproceduralForwardDependencies +// with Analysis[Map[CFGPosition, Set[Variable]]] +// with SimpleWorklistFixpointSolver[CFGPosition, Set[Variable], PowersetLattice[Variable]] { +//} \ No newline at end of file diff --git a/src/main/scala/analysis/Analysis.scala b/src/main/scala/analysis/Analysis.scala index e26817dc4..6294ed46e 100644 --- a/src/main/scala/analysis/Analysis.scala +++ b/src/main/scala/analysis/Analysis.scala @@ -164,10 +164,10 @@ trait ConstantPropagationWithSSA(val program: Program, val reachingDefs: Map[CFG // assignments case la: LocalAssign => val lhsWrappers = s.collect { - case (k, v) if RegisterVariableWrapper(k.variable, k.assigns) == RegisterVariableWrapper(la.lhs, getDefinition(la.lhs, r, reachingDefs)) => (k, v) + case (k, v) if RegisterWrapperPartialEquality(k.variable, k.assigns) == RegisterWrapperPartialEquality(la.lhs, getDefinition(la.lhs, r, reachingDefs)) => (k, v) } if (lhsWrappers.nonEmpty) { - s ++ lhsWrappers.map((k, v) => (k, v.union(eval(la.rhs, s, r)))) + s ++ lhsWrappers.map((k, v) => (RegisterWrapperEqualSets(k.variable, k.assigns ++ getDefinition(la.lhs, r, reachingDefs)), v.union(eval(la.rhs, s, r)))) } else { s + (RegisterWrapperEqualSets(la.lhs, getDefinition(la.lhs, r, reachingDefs)) -> eval(la.rhs, s, n)) } @@ -190,5 +190,5 @@ trait ConstantPropagationWithSSA(val program: Program, val reachingDefs: Map[CFG class ConstantPropagationSolverWithSSA(program: Program, reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])]) extends ConstantPropagationWithSSA(program, reachingDefs) with SimplePushDownWorklistFixpointSolver[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]], MapLattice[RegisterWrapperEqualSets, Set[BitVecLiteral], ConstantPropagationLatticeWithSSA]] - with IRIntraproceduralForwardDependencies + with IRInterproceduralForwardDependencies with Analysis[Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]]] diff --git a/src/main/scala/analysis/BACKUPLAttice b/src/main/scala/analysis/BACKUPLAttice new file mode 100644 index 000000000..b68fae9c1 --- /dev/null +++ b/src/main/scala/analysis/BACKUPLAttice @@ -0,0 +1,834 @@ +package analysis + +import ir._ +import analysis.BitVectorEval._ +import util.Logger +import math.pow + +/** Basic lattice + */ +trait Lattice[T]: + + type Element = T + /** The bottom element of this lattice. + */ + val bottom: T + + /** The top element of this lattice. Default: not implemented. + */ + def top: T = ??? + + /** The least upper bound of `x` and `y`. + */ + def lub(x: T, y: T): T + + /** Returns true whenever `x` <= `y`. 
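+   * The default implementation below tests this as lub(x, y) == y.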
+ */ + def leq(x: T, y: T): Boolean = lub(x, y) == y // rarely used, but easy to implement :-) + +//trait StridedInterval[+T] +// +//case class SI[T](s: T, l: T, u: T) extends StridedInterval[T] { +// override def toString = s"SI $s [$l, $u]" +//} +// +//case object SIBottom extends StridedInterval[BitVecLiteral] { +// override def toString = "SIBot" +//} + +///** +// * SI class that represents a strided interval +// * s is the stride +// * l is the lower bound +// * u is the upper bound +// * [l, u] is the interval +// * [l, u] \ s is the set of values +// * 0[l,l] represents the singleton set {l} +// */ +//class StridedIntervalLattice extends Lattice[StridedInterval[BitVecLiteral]] { +// val lowestPossibleValue: BitVecLiteral = BitVecLiteral(0, 64) +// val highestPossibleValue: BitVecLiteral = BitVecLiteral(Long.MaxValue - 1, 64) +// +// override val bottom: StridedInterval[BitVecLiteral] = SIBottom +// override def top: StridedInterval[BitVecLiteral] = SI(BitVecLiteral(1, 64), lowestPossibleValue, highestPossibleValue) +// +// def gamma(x: StridedInterval[BitVecLiteral]): Set[BitVecLiteral] = x match { +// case SIBottom => Set.empty +// case SI(s, l, u) => +// bitVec_interval(l, u, s) +// } +// +// /** S1[L1, U1] join S2[L2, U2] -> gcd(S1, S2)[min(L1, L2), max(U1, U2)] */ +// override def lub(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// (x, y) match { +// case (SIBottom, t) => t +// case (t, SIBottom) => t +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// SI(bitVec_gcd(s1, s2), bitVec_min(l1, l2), bitVec_max(u1, u2)) +// } +// } +// +// /** S1[L1, U1] meet S2[L2, U2] -> gcd(S1, S2)[max(L1, L2), min(U1, U2)] */ +// def meet(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// (x, y) match { +// case (SIBottom, t) => SIBottom +// case (t, SIBottom) => SIBottom +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// SI(bitVec_gcd(s1, s2), bitVec_max(l1, l2), bitVec_min(u1, u2)) +// } +// } +// +// /** Addition +// * Addition defined in page 6 Figure 2 of: https://dl.acm.org/doi/pdf/10.1145/1111542.1111560 +// * */ +// def add(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// (x, y) match { +// case (SIBottom, t) => t +// case (t, SIBottom) => t +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// val lbound = smt_bvadd(l1, l2) +// val ubound = smt_bvadd(u1, u2) +// val s = bitVec_gcd(s1, s2) +// if (smt_bvsle(ubound, highestPossibleValue) == TrueLiteral && smt_bvsge(lbound, lowestPossibleValue) == TrueLiteral) { +// SI(s, lbound, ubound) +// } else { +// throw new IllegalArgumentException(s"Addition overflow: $lbound, $ubound") +// } +// } +// } +// +// /** Unary Minus */ +// def unaryMinus(x: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// x match { +// case SIBottom => SIBottom +// case SI(s, l, u) => +// if (smt_bvcomp(l, u) == BitVecLiteral(1, 1) && (smt_bvcomp(l, lowestPossibleValue) == BitVecLiteral(1, 1) && smt_bvcomp(u, lowestPossibleValue) == BitVecLiteral(1, 1))) { +// SI(BitVecLiteral(0, 64), lowestPossibleValue, lowestPossibleValue) +// } else if (smt_bvcomp(l, lowestPossibleValue) == BitVecLiteral(0, 1)) { +// SI(s, smt_bvneg(u), smt_bvneg(l)) +// } +// else { +// SI(BitVecLiteral(1, 64), lowestPossibleValue, highestPossibleValue) +// } +// } +// } +// +// /** Substraction */ +// def sub(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): 
StridedInterval[BitVecLiteral] = { +// add(x, unaryMinus(y)) +// } +// +// /** Widen */ +// def widen(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// /* formula for widening: +// given: s1[lb1, ub1] and s2[lb2, ub2] +// then: gcd(s1, s2)[min(lb1, lb2), max(ub1, ub2)] +// where: min(lb1, lb2) = lb1 if lb1 <= lb2 +// and: min(lb1, lb2) = minPossibleValue otherwise +// where: max(ub1, ub2) = ub1 if ub1 >= ub2 +// and: max(ub1, ub2) = maxPossibleValue otherwise +// +// assuming: +// minPossibleValue = lowestPossibleValue +// maxPossibleValue = highestPossibleValue + (lb - 1) mod s +// */ +// (x, y) match { +// case (SIBottom, t) => ??? +// case (t, SIBottom) => ??? +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// val s = bitVec_gcd(s1, s2) +// val l = if (smt_bvule(l1, l2) == TrueLiteral) l1 else lowestPossibleValue +// val u = if (smt_bvuge(u1, u2) == TrueLiteral) u1 else smt_bvsmod(smt_bvadd(highestPossibleValue, smt_bvsub(l1, BitVecLiteral(1, 64))), s) +// SI(s, l, u) +// } +// } +// +// /** +// * Calculating strided interval for a list of values using accumulative gcd. +// * @param x the list of values +// * @return the strided interval representing the values in the list +// */ +// def valuesToSI(x: List[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// if (x.isEmpty) { +// SIBottom +// } else { +// val l = bitVec_min(x) +// val u = bitVec_max(x) +// val initialStride = smt_bvsub(u, l) +// val stride = x.foldLeft(initialStride) { +// case (acc, v) => bitVec_gcd(smt_bvsub(v, l), acc) +// } +// SI(stride, l, u) +// } +// } +//} + + +trait StridedWrappedInterval + +case class SI(s: BigInt, l: BigInt, u: BigInt, w: BigInt) extends StridedWrappedInterval { + if (l == u) { + require(s == 0) + } + override def toString = s"SASI $s [$l, $u] $w" +} + +case object SIBottom extends StridedWrappedInterval { + override def toString = "SASIBot" +} + +// TOP is 1[0^w, 1^w]w +case object SITop extends StridedWrappedInterval { + override def toString = "SASITop" +} + +class SASILattice extends Lattice[StridedWrappedInterval] { + val lowestPossibleValue: BigInt = 0 + val highestPossibleValue: BigInt = Long.MaxValue - 1 + + override val bottom: StridedWrappedInterval = SIBottom + + override def top: StridedWrappedInterval = SITop + +// def gamma(x: StridedWrappedInterval): Set[BitVecLiteral] = x match { +// case SIBottom => Set.empty +// case SI(s, l, u, w) => +// if (s == BitVecLiteral(0, 64)) { // singleton set +// Set(l) +// } else { +// bitVec_interval(l, u, s) +// } +// } + + def isSingleValue(x: StridedWrappedInterval): Boolean = x match { + case SI(s, l, u, w) => s == 0 && l == u + case _ => false + } + + def modularPlus(a: BigInt, b: BigInt, w: BigInt): BigInt = { + (a + b) mod BigInt(2).pow(w.toInt) + } + + def modularMinus(a: BigInt, b: BigInt, w: BigInt): BigInt = { + (a - b) mod BigInt(2).pow(w.toInt) + } + + def modularLEQ(a: BigInt, b: BigInt, x: BigInt, w: BigInt): Boolean = { + modularMinus(a, x, w) <= modularMinus(b, x, w) + } + + def membershipFunction(v: BigInt, r: StridedWrappedInterval): Boolean = { + r match { + case SIBottom => false + case SITop => true + case SI(sr, lb, ub, w) => + modularLEQ(v, ub, lb, w) && (modularMinus(v, lb, w) mod sr) == 0 + } + } + + def cardinalityFunction(r: StridedWrappedInterval, w: BigInt): BigInt = { + r match { + case SIBottom => 0 + case SITop => BigInt(2).pow(w.toInt) + case SI(sr, lb, ub, w) => ((ub - lb + 1) / sr) // TODO: this may need to be a math.floor operation + } + } + + def 
orderingOperator(r: StridedWrappedInterval, t: StridedWrappedInterval): Boolean = { + if (r == SITop && t != SITop) { + false + } else if (r == SIBottom || t == SITop) { + true + } else { + (r, t) match { + case (SI(sr, a, b, w1), SI(st, c, d, w2)) => + if ((a == c) && (b == d) && ((sr mod st) == 0)) { + return true + } + membershipFunction(a, t) && membershipFunction(b, t) && (!membershipFunction(c, r) || !membershipFunction(d, r)) && ((a - c) mod st) == 0 && (sr mod st) == 0 + case _ => false + } + } + } + + /** S1[L1, U1] join S2[L2, U2] -> gcd(S1, S2)[min(L1, L2), max(U1, U2)] */ + override def lub(r: StridedWrappedInterval, t: StridedWrappedInterval): StridedWrappedInterval = { +// (s, t) match { +// case (SIBottom, t) => t +// case (t, SIBottom) => t +// case (SI(a, b, u1, w1), SI(s2, c, d, w2)) => +// var u: BigInt = 0 +// var l: BigInt = 0 +// if (isSingleValue(s) && isSingleValue(t)) { +// val si1_card = WCardMod() +// val si2_card = WCardMod() +// if (si1_card <= si2_card) { +// l = a +// u = d +// } else { +// l = c +// u = b +// } +// +// SI(u - l, l, u, ) +// } +// } + + (r, t) match { + case (SI(sr, a, b, w1), SI(st, c, d, w2)) => + assert(w1 == w2) + val w = w1 // TODO: should this be the largest? + if (orderingOperator(r, t)) { + return t + } + if (orderingOperator(t, r)) { + return r + } + if (membershipFunction(a, t) && membershipFunction(b, t) && membershipFunction(c, r) && membershipFunction(d, r)) { + return SITop + } + if (membershipFunction(c, r) && membershipFunction(b, t) && !membershipFunction(a, t) && !membershipFunction(d, r)) { + return SI(sr.gcd(st).gcd(modularMinus(d, a, w)), a, d, w) + } + if (membershipFunction(a, t) && membershipFunction(d, r) && !membershipFunction(c, r) && !membershipFunction(b, t)) { + return SI(sr.gcd(st).gcd(modularMinus(b, c, w)), c, b, w) + } + val sad = SI(sr.gcd(st).gcd(modularMinus(d, a, w)), a, d, w) + val scb = SI(sr.gcd(st).gcd(modularMinus(b, c, w)), c, b, w) + if (!membershipFunction(a, t) && !membershipFunction(d, r) && !membershipFunction(c, r) && !membershipFunction(b, t) && cardinalityFunction(sad, w) <= cardinalityFunction(scb, w)) { + return sad + } + return scb + case _ => ??? + } + } + + def singletonSI(v: BigInt, w: BigInt): StridedWrappedInterval = { + SI(0, v, v, w) + } + + /** + * s + t = + * BOT if s = BOT or t = BOT + * gcd(s, t)(|a +w c, b +w d|) if s = (|a, b|), t = (|c, d|) and #s + #t <= 2^w + * @param s + * @param t + * @return + */ + def add(s: StridedWrappedInterval, t: StridedWrappedInterval): StridedWrappedInterval = { + (s, t) match { + case (SIBottom, _) => SIBottom // TODO: is this correct? + case (_, SIBottom) => SIBottom // TODO: is this correct? + case (SI(ss, a, b, w1), SI(st, c, d, w2)) if (cardinalityFunction(s, w1) + cardinalityFunction(t, w2)) <= BigInt(2).pow(w1.toInt) => + assert(w1 == w2) + return SI(ss.gcd(st), modularPlus(a, c, w1), modularPlus(b, d, w1), w1) + case _ => SITop + } + } + + def add(s: StridedWrappedInterval, t: BigInt, w: BigInt): StridedWrappedInterval = { + (s, t) match { + case (SIBottom, _) => SIBottom // TODO: is this correct? 
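+      // lift the constant t to the singleton interval 0[t, t] of width w and reuse interval addition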
+ case (SI(ss, a, b, w1), t) => + return add(s, singletonSI(t, w)) + case _ => SITop + } + } + + + + +// /** S1[L1, U1] meet S2[L2, U2] -> gcd(S1, S2)[max(L1, L2), min(U1, U2)] */ +// def meet(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// (x, y) match { +// case (SIBottom, t) => SIBottom +// case (t, SIBottom) => SIBottom +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// SI(bitVec_gcd(s1, s2), bitVec_max(l1, l2), bitVec_min(u1, u2)) +// } +// } +// +// /** Addition +// * Addition defined in page 6 Figure 2 of: https://dl.acm.org/doi/pdf/10.1145/1111542.1111560 +// * */ +// def add(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// (x, y) match { +// case (SIBottom, t) => t +// case (t, SIBottom) => t +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// val lbound = smt_bvadd(l1, l2) +// val ubound = smt_bvadd(u1, u2) +// val s = bitVec_gcd(s1, s2) +// if (smt_bvsle(ubound, highestPossibleValue) == TrueLiteral && smt_bvsge(lbound, lowestPossibleValue) == TrueLiteral) { +// SI(s, lbound, ubound) +// } else { +// throw new IllegalArgumentException(s"Addition overflow: $lbound, $ubound") +// } +// } +// } +// +// /** Unary Minus */ +// def unaryMinus(x: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// x match { +// case SIBottom => SIBottom +// case SI(s, l, u) => +// if (smt_bvcomp(l, u) == BitVecLiteral(1, 1) && (smt_bvcomp(l, lowestPossibleValue) == BitVecLiteral(1, 1) && smt_bvcomp(u, lowestPossibleValue) == BitVecLiteral(1, 1))) { +// SI(BitVecLiteral(0, 64), lowestPossibleValue, lowestPossibleValue) +// } else if (smt_bvcomp(l, lowestPossibleValue) == BitVecLiteral(0, 1)) { +// SI(s, smt_bvneg(u), smt_bvneg(l)) +// } +// else { +// SI(BitVecLiteral(1, 64), lowestPossibleValue, highestPossibleValue) +// } +// } +// } +// +// /** Substraction */ +// def sub(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// add(x, unaryMinus(y)) +// } +// +// /** Widen */ +// def widen(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// /* formula for widening: +// given: s1[lb1, ub1] and s2[lb2, ub2] +// then: gcd(s1, s2)[min(lb1, lb2), max(ub1, ub2)] +// where: min(lb1, lb2) = lb1 if lb1 <= lb2 +// and: min(lb1, lb2) = minPossibleValue otherwise +// where: max(ub1, ub2) = ub1 if ub1 >= ub2 +// and: max(ub1, ub2) = maxPossibleValue otherwise +// +// assuming: +// minPossibleValue = lowestPossibleValue +// maxPossibleValue = highestPossibleValue + (lb - 1) mod s +// */ +// (x, y) match { +// case (SIBottom, t) => ??? +// case (t, SIBottom) => ??? +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// val s = bitVec_gcd(s1, s2) +// val l = if (smt_bvule(l1, l2) == TrueLiteral) l1 else lowestPossibleValue +// val u = if (smt_bvuge(u1, u2) == TrueLiteral) u1 else smt_bvsmod(smt_bvadd(highestPossibleValue, smt_bvsub(l1, BitVecLiteral(1, 64))), s) +// SI(s, l, u) +// } +// } +// +// /** +// * Calculating strided interval for a list of values using accumulative gcd. 
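+//   * For example, the values {4, 8, 12} give l = 4, u = 12 and stride gcd(8 - 4, 12 - 4) = 4,
+//   * i.e. the strided interval 4[4, 12].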
+// * +// * @param x the list of values +// * @return the strided interval representing the values in the list +// */ +// def valuesToSI(x: List[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// if (x.isEmpty) { +// SIBottom +// } else { +// val l = bitVec_min(x) +// val u = bitVec_max(x) +// val initialStride = smt_bvsub(u, l) +// val stride = x.foldLeft(initialStride) { +// case (acc, v) => bitVec_gcd(smt_bvsub(v, l), acc) +// } +// SI(stride, l, u) +// } +// } +} + +trait ValueSet[+T] + +case class VS[T](m: Map[T, StridedWrappedInterval]) extends ValueSet[T] { + override def toString: String = m.toString +} + +case object VSBottom extends ValueSet[Nothing] { + override def toString = "VSBot" +} + +case object VSTop extends ValueSet[Nothing] { + override def toString = "VSTop" +} + +/** The lattice of integers with the standard ordering. + */ +class ValueSetLattice[T] extends Lattice[ValueSet[T]] { + + override val bottom: ValueSet[T] = VSBottom + override def top: ValueSet[T] = VSTop + + val lattice: SASILattice = SASILattice() + + override def lub(x: ValueSet[T], y: ValueSet[T]): ValueSet[T] = { + (x, y) match { + case (VSBottom, t) => t + case (t, VSBottom) => t + case (VSTop, _) => VSTop + case (_, VSTop) => VSTop + case (VS(m1), VS(m2)) => + VS(m1.keys.foldLeft(m2) { + case (acc, k) => + val v1 = m1(k) + val v2 = m2(k) + acc + (k -> lattice.lub(v1, v2)) + }) + } + } + +// def meet(x: ValueSet[String], y: ValueSet[String]): ValueSet[String] = { +// (x, y) match { +// case (VSBottom, t) => VSBottom +// case (t, VSBottom) => VSBottom +// case (VSTop, _) => y +// case (_, VSTop) => x +// case (VS(m1), VS(m2)) => +// VS(m1.keys.foldLeft(m2) { +// case (acc, k) => +// val v1 = m1(k) +// val v2 = m2(k) +// acc + (k -> lattice.meet(v1, v2)) +// }) +// } +// } + + def add(x: ValueSet[T], y: ValueSet[T]): ValueSet[T] = { + (x, y) match { + case (VSBottom, t) => t + case (t, VSBottom) => t + case (VSTop, _) => VSTop + case (_, VSTop) => VSTop + case (VS(m1), VS(m2)) => + VS(m1.keys.foldLeft(m2) { + case (acc, k) => + val v1 = m1(k) + val v2 = m2(k) + acc + (k -> lattice.add(v1, v2)) + }) + } + } + + def add(x: ValueSet[T], y: BitVecLiteral): ValueSet[T] = { + x match { + case VSBottom => VSBottom + case VSTop => VSTop + case VS(m) => + VS(m.map { + case (k, s) => k -> lattice.add(s, y.value, y.size) // TODO: is the size correct here? + }) + } + } + + def widen(vs1: ValueSet[T], vs2: ValueSet[T]): ValueSet[T] = { + (vs1, vs2) match { + case (VSBottom, t) => ??? + case (t, VSBottom) => ??? + case (VSTop, _) => VSTop + case (_, VSTop) => VSTop + case (VS(m1), VS(m2)) => + VS(m1.keys.foldLeft(m2) { + case (acc, k) => + val v1 = m1(k) + val v2 = m2(k) + acc + (k -> lattice.widen(v1, v2)) + }) + } + } + + def removeLowerBounds(vs: ValueSet[T]): ValueSet[T] = { + vs match { + case VSBottom => VSBottom + case VSTop => VSTop + case VS(m) => + VS(m.map { + case (k, SI(s, l, u, w)) => k -> SI(s, lattice.lowestPossibleValue, u, w) + }) + } + } + + def removeUpperBound(vs: ValueSet[T]): ValueSet[T] = { + vs match { + case VSBottom => VSBottom + case VSTop => VSTop + case VS(m) => + VS(m.map { + case (k, SI(s, l, u, w)) => k -> SI(s, l, lattice.highestPossibleValue, w) + }) + } + } + + /** + * ∗(vs, s): Returns a pair of sets (F, P). F represents the set of “fully accessed” a-locs: it + * consists of the a-locs that are of size s and whose starting addresses are in vs. 
P represents + * the set of “partially accessed” a-locs: it consists of (i) a-locs whose starting addresses are in + * vs but are not of size s, and (ii) a-locs whose addresses are in vs but whose starting addresses + * and sizes do not meet the conditions to be in F. [Reference VSA paper] + * + * @param vsR2 + * @param s size of the dereference + * @return + */ + def dereference(s: BigInt, vs: ValueSet[String], mmm: MemoryModelMap): (Set[MemoryRegion], Set[MemoryRegion]) = { + vs match { + case VSBottom => VSBottom + case VSTop => ??? //TODO: should this return everything? + case VS(m) => + for (elem <- m) { + if (elem._2 != lattice.bottom) { // region SI defined + elem._2 match { + case SI(stride, lower, upper) => + val gamma: Set[BitVecLiteral] = lattice.gamma(SI(stride, lower, upper)) + // TODO: Global memory size can be retrieved from the symbol table and are of size s + // Map addresses to exact memory locations + val fullyAccessedLocations = gamma.toList.flatMap(address => mmm.findStackFullAccessesOnly(address.value, s)) + + // Identify partially accessed locations (if any) + val partiallyAccessedLocations = gamma.toList.flatMap(address => mmm.findStackPartialAccessesOnly(address.value, s)) + + // Return the set of fully accessed locations and the set of partially accessed locations + return (fullyAccessedLocations.diff(partiallyAccessedLocations).asInstanceOf[Set[MemoryRegion]], partiallyAccessedLocations.asInstanceOf[Set[MemoryRegion]]) + case _ => ??? + } + } + } + } + (Set.empty, Set.empty) + } +} + + + +/** The powerset lattice of a set of elements of type `A` with subset ordering. + */ +class PowersetLattice[A] extends Lattice[Set[A]] { + val bottom: Set[A] = Set.empty + def lub(x: Set[A], y: Set[A]): Set[A] = x.union(y) +} + +// Single element lattice (using Option) +class SingleElementLattice[T] extends Lattice[Option[T]] { + val bottom: Option[T] = None + def lub(x: Option[T], y: Option[T]): Option[T] = (x, y) match { + case (None, None) => None + case _ => Some(x.getOrElse(y.get)) + } +} + +trait LiftedElement[+T] +case class Lift[T](el: T) extends LiftedElement[T] { + override def toString = s"Lift($el)" +} +case object LiftedBottom extends LiftedElement[Nothing] { + override def toString = "LiftBot" +} +/** + * The lift lattice for `sublattice`. + * Supports implicit lifting and unlifting. + */ +class LiftLattice[T, +L <: Lattice[T]](val sublattice: L) extends Lattice[LiftedElement[T]] { + + val bottom: LiftedElement[T] = LiftedBottom + + def lub(x: LiftedElement[T], y: LiftedElement[T]): LiftedElement[T] = + (x, y) match { + case (LiftedBottom, t) => t + case (t, LiftedBottom) => t + case (Lift(a), Lift(b)) => Lift(sublattice.lub(a, b)) + } + + /** + * Lift elements of the sublattice to this lattice. + * Note that this method is declared as implicit, so the conversion can be done automatically. + */ + def lift(x: T): LiftedElement[T] = Lift(x) + + /** + * Un-lift elements of this lattice to the sublattice. + * Throws an IllegalArgumentException if trying to unlift the bottom element + * Note that this method is declared as implicit, so the conversion can be done automatically. 
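+   * For example, unlift(lift(x)) == x, whereas unlift(LiftedBottom) throws an IllegalArgumentException.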
+ */ + def unlift(x: LiftedElement[T]): T = x match { + case Lift(s) => s + case LiftedBottom => throw new IllegalArgumentException("Cannot unlift bottom") + } +} + +trait TwoElement + +case object TwoElementTop extends TwoElement +case object TwoElementBottom extends TwoElement + + +/** + * A lattice with only top and bottom + */ +class TwoElementLattice extends Lattice[TwoElement]: + override val bottom: TwoElement = TwoElementBottom + override val top: TwoElement = TwoElementTop + + def lub(x: TwoElement, y: TwoElement): TwoElement = (x, y) match { + case (TwoElementBottom, TwoElementBottom) => TwoElementBottom + case _ => TwoElementTop + } + +trait FlatElement[+T] +case class FlatEl[T](el: T) extends FlatElement[T] +case object Top extends FlatElement[Nothing] +case object Bottom extends FlatElement[Nothing] + +/** The flat lattice made of element of `X`. Top is greater than every other element, and Bottom is less than every + * other element. No additional ordering is defined. + */ +class FlatLattice[X] extends Lattice[FlatElement[X]] { + + val bottom: FlatElement[X] = Bottom + + override val top: FlatElement[X] = Top + + def lub(x: FlatElement[X], y: FlatElement[X]): FlatElement[X] = (x, y) match { + case (a, Bottom) => a + case (Bottom, b) => b + case (a, b) if a == b => a + case (Top, _) => Top + case (_, Top) => Top + case _ => Top + } +} + +class TupleLattice[L1 <: Lattice[T1], L2 <: Lattice[T2], T1, T2](val lattice1: L1, val lattice2: L2) extends Lattice[(T1, T2)] { + override val bottom: (T1, T2) = (lattice1.bottom, lattice2.bottom) + + override def lub(x: (T1, T2), y: (T1, T2)): (T1, T2) = { + val (x1, x2) = x + val (y1, y2) = y + (lattice1.lub(x1, y1), lattice2.lub(x2, y2)) + } + + override def leq(x: (T1, T2), y: (T1, T2)): Boolean = { + val (x1, x2) = x + val (y1, y2) = y + lattice1.leq(x1, y1) && lattice2.leq(x2, y2) + } + + override def top: (T1, T2) = (lattice1.top, lattice2.top) +} + +//trait StridedIntervalLattice[T] extends Lattice[(T, T, T)] { +// override val bottom: (T, T, T) = (???, ???, ???) +// +// override def lub(x: (T1, T2), y: (T1, T2)): (T1, T2) = { +// val (x1, x2) = x +// val (y1, y2) = y +// (lattice1.lub(x1, y1), lattice2.lub(x2, y2)) +// } +// +// override def leq(x: (T1, T2), y: (T1, T2)): Boolean = { +// val (x1, x2) = x +// val (y1, y2) = y +// lattice1.leq(x1, y1) && lattice2.leq(x2, y2) +// } +// +// override def top: (T1, T2) = (lattice1.top, lattice2.top) +//} + +/** A lattice of maps from a set of elements of type `A` to a lattice with element `L'. Bottom is the default value. + */ +class MapLattice[A, T, +L <: Lattice[T]](val sublattice: L) extends Lattice[Map[A, T]] { + val bottom: Map[A, T] = Map().withDefaultValue(sublattice.bottom) + def lub(x: Map[A, T], y: Map[A, T]): Map[A, T] = + x.keys.foldLeft(y)((m, a) => m + (a -> sublattice.lub(x(a), y(a)))).withDefaultValue(sublattice.bottom) +} + +/** Constant propagation lattice. 
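+ * Abstract values are FlatElement[BitVecLiteral]: Bottom for unreachable code, a single known
+ * bit-vector constant, or Top for unknown; the bv* helpers below lift the corresponding
+ * BitVectorEval operations pointwise, propagating Bottom and Top.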
+ * + */ +class ConstantPropagationLattice extends FlatLattice[BitVecLiteral] { + private def apply(op: (BitVecLiteral, BitVecLiteral) => BitVecLiteral, a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = try { + (a, b) match + case (FlatEl(x), FlatEl(y)) => FlatEl(op(x, y)) + case (Bottom, _) => Bottom + case (_, Bottom) => Bottom + case (_, Top) => Top + case (Top, _) => Top + } catch { + case e: Exception => + Logger.error(s"Failed on op $op with $a and $b") + throw e + } + + private def apply(op: BitVecLiteral => BitVecLiteral, a: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = a match + case FlatEl(x) => FlatEl(op(x)) + case Top => Top + case Bottom => Bottom + + def bv(a: BitVecLiteral): FlatElement[BitVecLiteral] = FlatEl(a) + def bvadd(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvadd, a, b) + def bvsub(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvsub, a, b) + def bvmul(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvmul, a, b) + def bvudiv(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvudiv, a, b) + def bvsdiv(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvsdiv, a, b) + def bvsrem(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvsrem, a, b) + def bvurem(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvurem, a, b) + def bvsmod(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvsmod, a, b) + def bvand(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvand, a, b) + def bvor(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvor, a, b) + def bvxor(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvxor, a, b) + def bvnand(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvnand, a, b) + def bvnor(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvnor, a, b) + def bvxnor(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvxnor, a, b) + def bvnot(a: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvnot, a) + def bvneg(a: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvneg, a) + def bvshl(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvshl, a, b) + def bvlshr(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvlshr, a, b) + def bvashr(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvashr, a, b) + def bvcomp(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = 
apply(BitVectorEval.smt_bvcomp, a, b) + def zero_extend(width: Int, a: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_zero_extend(width, _: BitVecLiteral), a) + def sign_extend(width: Int, a: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_sign_extend(width, _: BitVecLiteral), a) + def extract(high: Int, low: Int, a: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = + apply(BitVectorEval.boogie_extract(high, low, _: BitVecLiteral), a) + def concat(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_concat, a, b) +} + +/** Constant propagation lattice. + * + */ +class ConstantPropagationLatticeWithSSA extends PowersetLattice[BitVecLiteral] { + private def apply(op: (BitVecLiteral, BitVecLiteral) => BitVecLiteral, a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = + val res = for { + x <- a + y <- b + } yield op(x, y) + res + + private def apply(op: BitVecLiteral => BitVecLiteral, a: Set[BitVecLiteral]): Set[BitVecLiteral] = + val res = for { + x <- a + } yield op(x) + res + + def bv(a: BitVecLiteral): Set[BitVecLiteral] = Set(a) + def bvadd(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvadd, a, b) + def bvsub(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvsub, a, b) + def bvmul(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvmul, a, b) + def bvudiv(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvudiv, a, b) + def bvsdiv(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvsdiv, a, b) + def bvsrem(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvsrem, a, b) + def bvurem(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvurem, a, b) + def bvsmod(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvsmod, a, b) + def bvand(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvand, a, b) + def bvor(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvor, a, b) + def bvxor(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvxor, a, b) + def bvnand(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvnand, a, b) + def bvnor(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvnor, a, b) + def bvxnor(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvxnor, a, b) + def bvnot(a: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvnot, a) + def bvneg(a: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvneg, a) + def bvshl(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvshl, a, b) + def bvlshr(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvlshr, a, b) + def bvashr(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvashr, a, b) + def bvcomp(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvcomp, a, b) + def zero_extend(width: Int, a: 
Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_zero_extend(width, _: BitVecLiteral), a) + def sign_extend(width: Int, a: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_sign_extend(width, _: BitVecLiteral), a) + + def extract(high: Int, low: Int, a: Set[BitVecLiteral]): Set[BitVecLiteral] = + apply(BitVectorEval.boogie_extract(high, low, _: BitVecLiteral), a) + + def concat(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_concat, a, b) +} \ No newline at end of file diff --git a/src/main/scala/analysis/BitVectorEval.scala b/src/main/scala/analysis/BitVectorEval.scala index a3da4de13..23cd152c0 100644 --- a/src/main/scala/analysis/BitVectorEval.scala +++ b/src/main/scala/analysis/BitVectorEval.scala @@ -329,20 +329,28 @@ object BitVectorEval { } } - def smt_min(s: BitVecLiteral, t: BitVecLiteral): BitVecLiteral = { + def bitVec_min(s: BitVecLiteral, t: BitVecLiteral): BitVecLiteral = { if (smt_bvslt(s, t) == TrueLiteral) s else t } - def smt_max(s: BitVecLiteral, t: BitVecLiteral): BitVecLiteral = { + def bitVec_min(s: List[BitVecLiteral]): BitVecLiteral = { + s.reduce(bitVec_min) + } + + def bitVec_max(s: BitVecLiteral, t: BitVecLiteral): BitVecLiteral = { if (smt_bvslt(s, t) == TrueLiteral) t else s } + def bitVec_max(s: List[BitVecLiteral]): BitVecLiteral = { + s.reduce(bitVec_max) + } + @tailrec - def smt_gcd(a: BitVecLiteral, b: BitVecLiteral): BitVecLiteral = { - if (b.value == 0) a else smt_gcd(b, smt_bvsmod(a, b)) + def bitVec_gcd(a: BitVecLiteral, b: BitVecLiteral): BitVecLiteral = { + if (b.value == 0) a else bitVec_gcd(b, smt_bvsmod(a, b)) } - def smt_interval(lb: BitVecLiteral, ub: BitVecLiteral, step: BitVecLiteral): Set[BitVecLiteral] = { + def bitVec_interval(lb: BitVecLiteral, ub: BitVecLiteral, step: BitVecLiteral): Set[BitVecLiteral] = { require(smt_bvule(lb, ub) == TrueLiteral, "Lower bound must be less than or equal to upper bound") (lb.value to ub.value by step.value).map(BitVecLiteral(_, lb.size)).toSet } diff --git a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala index 4f5ccc458..9d5511003 100644 --- a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala +++ b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala @@ -5,14 +5,21 @@ import ir.* import util.Logger import scala.collection.mutable +trait RegisterEquality: + def variable: Variable + def assigns: Set[LocalAssign] + /** Wrapper for variables so we can have Steensgaard-specific equals method indirectly * Relies on the SSA sets intersection being non-empty * */ -case class RegisterVariableWrapper(variable: Variable, assigns: Set[LocalAssign]) { +case class RegisterWrapperPartialEquality(variable: Variable, assigns: Set[LocalAssign]) extends RegisterEquality { override def equals(obj: Any): Boolean = { obj match { - case RegisterVariableWrapper(other, otherAssigns) => + case RegisterWrapperPartialEquality(other, otherAssigns) => variable == other && assigns.intersect(otherAssigns).nonEmpty + case RegisterWrapperEqualSets(other, otherAssigns) => + // treat it as Partial Equality + RegisterWrapperPartialEquality(variable, assigns) == RegisterWrapperPartialEquality(other, otherAssigns) case _ => false } @@ -22,11 +29,14 @@ case class RegisterVariableWrapper(variable: Variable, assigns: Set[LocalAssign] /** Wrapper for variables so we can have ConstantPropegation-specific equals method indirectly * Relies on SSA sets being exactly the same * */ -case 
class RegisterWrapperEqualSets(variable: Variable, assigns: Set[LocalAssign]) { +case class RegisterWrapperEqualSets(variable: Variable, assigns: Set[LocalAssign]) extends RegisterEquality { override def equals(obj: Any): Boolean = { obj match { case RegisterWrapperEqualSets(other, otherAssigns) => variable == other && assigns == otherAssigns + case RegisterWrapperPartialEquality(other, otherAssigns) => + // treat it as Partial Equality + RegisterWrapperPartialEquality(variable, assigns) == RegisterWrapperPartialEquality(other, otherAssigns) case _ => false } @@ -37,12 +47,12 @@ case class RegisterWrapperEqualSets(variable: Variable, assigns: Set[LocalAssign * expression node in the AST. It is implemented using [[analysis.solvers.UnionFindSolver]]. */ class InterprocSteensgaardAnalysis( - program: Program, - constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], - regionAccesses: Map[CfgNode, Map[RegisterVariableWrapper, FlatElement[Expr]]], - mmm: MemoryModelMap, - reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], - globalOffsets: Map[BigInt, BigInt]) extends Analysis[Any] { + program: Program, + constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], + regionAccesses: Map[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]]], + mmm: MemoryModelMap, + reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], + globalOffsets: Map[BigInt, BigInt]) extends Analysis[Any] { val solver: UnionFindSolver[StTerm] = UnionFindSolver() @@ -249,7 +259,7 @@ class InterprocSteensgaardAnalysis( // X = alloc P: [[X]] = ↑[[alloc-i]] if (directCall.target.name == "malloc") { val alloc = HeapRegion(nextMallocCount(), BitVecLiteral(BigInt(0), 0), IRWalk.procedure(cmd)) - unify(IdentifierVariable(RegisterVariableWrapper(mallocVariable, getUse(mallocVariable, cmd, reachingDefs))), PointerRef(AllocVariable(alloc))) + unify(IdentifierVariable(RegisterWrapperPartialEquality(mallocVariable, getUse(mallocVariable, cmd, reachingDefs))), PointerRef(AllocVariable(alloc))) } case localAssign: LocalAssign => @@ -257,7 +267,7 @@ class InterprocSteensgaardAnalysis( case binOp: BinaryExpr => // X1 = &X2: [[X1]] = ↑[[X2]] exprToRegion(binOp, cmd).foreach( - x => unify(IdentifierVariable(RegisterVariableWrapper(localAssign.lhs, getDefinition(localAssign.lhs, cmd, reachingDefs))), PointerRef(AllocVariable(x))) + x => unify(IdentifierVariable(RegisterWrapperPartialEquality(localAssign.lhs, getDefinition(localAssign.lhs, cmd, reachingDefs))), PointerRef(AllocVariable(x))) ) // TODO: should lookout for global base + offset case as well case _ => @@ -270,7 +280,7 @@ class InterprocSteensgaardAnalysis( X2_star.foreach( x => unify(ExpressionVariable(x), PointerRef(alpha)) ) - unify(alpha, IdentifierVariable(RegisterVariableWrapper(X1, getDefinition(X1, cmd, reachingDefs)))) + unify(alpha, IdentifierVariable(RegisterWrapperPartialEquality(X1, getDefinition(X1, cmd, reachingDefs)))) Logger.debug("Memory load: " + memoryLoad) Logger.debug("Index: " + memoryLoad.index) @@ -282,13 +292,13 @@ class InterprocSteensgaardAnalysis( // X1 = &X: [[X1]] = ↑[[X2]] (but for globals) val $X2 = exprToRegion(memoryLoad.index, cmd) $X2.foreach( - x => unify(IdentifierVariable(RegisterVariableWrapper(localAssign.lhs, getDefinition(localAssign.lhs, cmd, reachingDefs))), PointerRef(AllocVariable(x))) + x => unify(IdentifierVariable(RegisterWrapperPartialEquality(localAssign.lhs, 
getDefinition(localAssign.lhs, cmd, reachingDefs))), PointerRef(AllocVariable(x))) ) case variable: Variable => // X1 = X2: [[X1]] = [[X2]] val X1 = localAssign.lhs val X2 = variable - unify(IdentifierVariable(RegisterVariableWrapper(X1, getDefinition(X1, cmd, reachingDefs))), IdentifierVariable(RegisterVariableWrapper(X2, getUse(X2, cmd, reachingDefs)))) + unify(IdentifierVariable(RegisterWrapperPartialEquality(X1, getDefinition(X1, cmd, reachingDefs))), IdentifierVariable(RegisterWrapperPartialEquality(X2, getUse(X2, cmd, reachingDefs)))) case _ => // do nothing } } @@ -329,16 +339,16 @@ class InterprocSteensgaardAnalysis( /** @inheritdoc */ - def pointsTo(): Map[RegisterVariableWrapper, Set[RegisterVariableWrapper | MemoryRegion]] = { + def pointsTo(): Map[RegisterWrapperPartialEquality, Set[RegisterWrapperPartialEquality | MemoryRegion]] = { val solution = solver.solution() val unifications = solver.unifications() Logger.debug(s"Solution: \n${solution.mkString(",\n")}\n") Logger.debug(s"Sets: \n${unifications.values.map { s => s"{ ${s.mkString(",")} }"}.mkString(", ")}") val vars = solution.keys.collect { case id: IdentifierVariable => id } - val emptyMap = Map[RegisterVariableWrapper, Set[RegisterVariableWrapper | MemoryRegion]]() + val emptyMap = Map[RegisterWrapperPartialEquality, Set[RegisterWrapperPartialEquality | MemoryRegion]]() val pointsto = vars.foldLeft(emptyMap) { (a, v: IdentifierVariable) => - val pt: Set[RegisterVariableWrapper | MemoryRegion] = unifications(solution(v)).collect { + val pt: Set[RegisterWrapperPartialEquality | MemoryRegion] = unifications(solution(v)).collect { case PointerRef(IdentifierVariable(id)) => id case PointerRef(AllocVariable(alloc)) => alloc }.toSet @@ -350,9 +360,9 @@ class InterprocSteensgaardAnalysis( /** @inheritdoc */ - def mayAlias(): (RegisterVariableWrapper, RegisterVariableWrapper) => Boolean = { + def mayAlias(): (RegisterWrapperPartialEquality, RegisterWrapperPartialEquality) => Boolean = { val solution = solver.solution() - (id1: RegisterVariableWrapper, id2: RegisterVariableWrapper) => + (id1: RegisterWrapperPartialEquality, id2: RegisterWrapperPartialEquality) => val sol1 = solution(IdentifierVariable(id1)) val sol2 = solution(IdentifierVariable(id2)) sol1 == sol2 && sol1.isInstanceOf[PointerRef] // same equivalence class, and it contains a reference @@ -372,7 +382,7 @@ case class AllocVariable(alloc: MemoryRegion) extends StTerm with Var[StTerm] { /** A term variable that represents an identifier in the program. 
*/ -case class IdentifierVariable(id: RegisterVariableWrapper) extends StTerm with Var[StTerm] { +case class IdentifierVariable(id: RegisterWrapperPartialEquality) extends StTerm with Var[StTerm] { override def toString: String = s"$id" } diff --git a/src/main/scala/analysis/LAST_VSA_BACKUP.scala b/src/main/scala/analysis/LAST_VSA_BACKUP.scala new file mode 100644 index 000000000..92bff84b7 --- /dev/null +++ b/src/main/scala/analysis/LAST_VSA_BACKUP.scala @@ -0,0 +1,276 @@ +//package analysis +//import ir.* +//import util.* +// +//import scala.collection.mutable +//import analysis.BitVectorEval.* +//import analysis.* +// +//class ActualVSA(program: Program, +// constantPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], +// reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], +// mmm: MemoryModelMap) { +// +// enum Flag { +// case CF // Carry Flag +// case ZF // Zero Flag +// case SF // Sign Flag +// case PF // Parity Flag +// case AF // Auxiliary Flag +// case OF // Overflow Flag +// } +// +// enum Bool3 { +// case True +// case False +// case Maybe +// } +// +// // TODO: This assumes no function is called Data or Heap (should be a tuple instead) +// val DATA_REGION_NAME = "Data" +// val HEAP_REGION_NAME = "Heap" +// +// val lattice: ValueSetLattice = ValueSetLattice() +// +// type MemRgn = String // all record titles +// +// val MEMORY_REGIONS: List[MemRgn] = (Set(DATA_REGION_NAME, HEAP_REGION_NAME) ++ mmm.getAllocsPerProcedure.keySet).toList.sorted +// val ALLOCS: Map[String, Set[MemoryRegion]] = mmm.getAllocsPerProcedure.asInstanceOf[Map[String, Set[MemoryRegion]]] ++ Map("Data" -> mmm.getAllDataRegions.asInstanceOf[Set[MemoryRegion]], "Heap" -> mmm.getAllHeapRegions.asInstanceOf[Set[MemoryRegion]]) +// val AllocEnv: AlocEnv = AlocEnv() +// +// // /** +// // * ∗(vs, s): Returns a pair of sets (F, P). F represents the set of “fully accessed” a-locs: it +// // * consists of the a-locs that are of size s and whose starting addresses are in vs. P represents +// // * the set of “partially accessed” a-locs: it consists of (i) a-locs whose starting addresses are in +// // * vs but are not of size s, and (ii) a-locs whose addresses are in vs but whose starting addresses +// // * and sizes do not meet the conditions to be in F. 
[Reference VSA paper] +// // * +// // * @param vsR2 +// // * @param s size of the dereference +// // * @return +// // */ +// // def dereference(s: BigInt): (Set[MemoryRegion], Set[MemoryRegion]) = { +// // // TODO: Global memory size can be retrieved from the symbol table and are of size s +// // // Map addresses to exact memory locations +// // val fullyAccessedLocations = stridedInterval.gamma.flatMap(address => mmm.findStackFullAccessesOnly(address.value, s)) +// // +// // // Identify partially accessed locations (if any) +// // val partiallyAccessedLocations = stridedInterval.gamma.flatMap(address => mmm.findStackPartialAccessesOnly(address.value, s)) +// // +// // // Return the set of fully accessed locations and the set of partially accessed locations +// // (fullyAccessedLocations.diff(partiallyAccessedLocations).asInstanceOf[Set[MemoryRegion]], partiallyAccessedLocations.asInstanceOf[Set[MemoryRegion]]) +// // } +// // } +// +// /** +// * Allocs Structure +// * Procedures +// * main -> {alloc1, alloc2, alloc3} +// * foo -> {alloc4, alloc5} +// * Data +// * Data -> {alloc6, alloc7} +// * Heap +// * Heap -> {alloc8, alloc9} +// */ +// case class AlocEnv() { +// private val envs: mutable.Map[MemRgn, StridedInterval[BitVecLiteral]] = preCalculate() +// private val valueSets: mutable.Map[MemRgn, ValueSet[String]] = mutable.Map[MemRgn, ValueSet[String]]() +// +// def preCalculate(): mutable.Map[MemRgn, StridedInterval[BitVecLiteral]] = { +// val res = mutable.Map[MemRgn, StridedInterval[BitVecLiteral]]() +// MEMORY_REGIONS.foreach(r => { +// res.put(r, getSrtidedIntervals(r)) +// }) +// res +// } +// +// private def getSrtidedIntervals(r: MemRgn): StridedInterval[BitVecLiteral] = { +// // if stack or data we have offset. Otherwise we mark it as bottom VS +// if (r == DATA_REGION_NAME) { +// val allocsThatBelong = ALLOCS(r).asInstanceOf[Set[DataRegion]] +// lattice.lattice.valuesToSI(allocsThatBelong.map(a => a.start).toList) +// } else if (r == HEAP_REGION_NAME) { +// lattice.lattice.bottom +// } else { +// val allocsThatBelong = ALLOCS(r).asInstanceOf[Set[StackRegion]] +// lattice.lattice.valuesToSI(allocsThatBelong.map(a => a.start).toList) +// } +// } +// +// def getVS(r: MemRgn): ValueSet[String] = { +// if (valueSets.contains(r)) { +// valueSets(r) +// } else { +// // map everything that is not r to bottom +// val cpy = envs.clone() +// cpy.keys.foreach(k => if k != r then cpy(k) = lattice.lattice.bottom) +// valueSets.put(r, VS(cpy.toMap)) +// VS(cpy.toMap) +// } +// } +// } +// +// case class AbsEnv(): +// var regEnv: mutable.Map[Variable, VS[String]] = mutable.Map[Variable, VS[String]]().withDefault(_ => lattice.bottom) +// var flagEnv: mutable.Map[Flag, Bool3] = mutable.Map[Flag, Bool3]().withDefault(_ => Bool3.Maybe) +// var alocEnv: AlocEnv = AlocEnv() +// +// def join(absEnv: AbsEnv): AbsEnv = { +// val out = AbsEnv() +// out.regEnv = regEnv.clone() +// out.flagEnv = flagEnv.clone() +// out.alocEnv = alocEnv +// absEnv.regEnv.foreach { case (k, v) => +// out.regEnv(k) = lattice.lub(regEnv(k), v) +// } +// absEnv.flagEnv.foreach { case (k, v) => +// out.flagEnv(k) = ??? 
+// } +// out +// } +// +// override def toString: String = { +// val env1Str = regEnv.map { case (k, v) => s"$k -> $v" }.mkString("\n\n") +// val env2Str = flagEnv.map { case (k, v) => s"$k -> $v" }.mkString("\n\n") +// val env3Str = alocEnv.toString +// s"Env1:\n\n$env1Str\n\nEnv2:\n\n$env2Str\n\nEnv3:\n\n$env3Str" +// } +// +// def AbstractTransformer(in: AbsEnv, instruction: CFGPosition): AbsEnv = { +// instruction match { +// case p: Procedure => in +// case b: Block => in +// case c: Command => +// c match +// case statement: Statement => +// statement match +// case localAssign: LocalAssign => +// localAssign.rhs match +// case binOp: BinaryExpr => +// if (binOp.arg1.isInstanceOf[Variable]) { +// val R1 = localAssign.lhs +// val R2 = binOp.arg1.asInstanceOf[Variable] +// val c = evaluateExpression(binOp.arg2, constantPropResult(instruction)) +// if (c.isDefined) { +// +// // R1 = R2 + c +// val out = in +// val vs_R2: ValueSet[String] = in.regEnv.get(R2) +// out.regEnv(R1) = lattice.add(vs_R2, c.get) +// return out +// } +// } +// in +// case memoryLoad: MemoryLoad => +// memoryLoad.index match +// case binOp: BinaryExpr => +// if (binOp.arg2.isInstanceOf[Variable]) { +// val R1 = localAssign.lhs +// val R2 = binOp.arg1.asInstanceOf[Variable] // TODO: Is R2 always a variable? +// val out = in +// getDefinition(binOp.arg2.asInstanceOf[Variable], instruction, reachingDefs).foreach { +// d => +// d.rhs match +// case binOp2: BinaryExpr => +// val c1 = evaluateExpression(binOp2.arg1, constantPropResult(instruction)) +// val c2 = evaluateExpression(binOp2.arg2, constantPropResult(instruction)) +// // R1 = *(R2 + c1) + c2 +// val vs_R2: ValueSet[String] = in.regEnv(R2) +// val s = memoryLoad.size // s is the size of dereference performed by the instruction +// val (f: Set[MemoryRegion], p: Set[MemoryRegion]) = lattice.dereference(BigInt(s), vs_R2, mmm) +// println("VSA") +// println(f) +// if (p.isEmpty) { +// val vs_rhs = f.map(r => in.regEnv(r).getAAlloc(r).valueSet).fold(lattice.bottom)(_ join _) +// out.env1(R1) = lattice.add(vs_rhs, c2.get) +// } else { +// out.env1(R1) = lattice.top +// } +// case _ => +// } +// out +// } else { +// in +// } +// case _ => in // TODO: Handle other cases +// case variable: Variable => +// ??? +// // val R1 = localAssign.lhs +// // val R2 = variable +// // // R1 >= R2 +// // val out = in +// // val vs_R1 = in.env1.getOrElseUpdate(R1, ValueSetLattice.BOTTOM) +// // val vs_R2 = in.env1(R2) +// // val vs_lb = vs_R2.removeUpperBounds() +// // val vs_ub = vs_R1.removeLowerBounds() +// // out.env1(R1) = vs_R1.meet(vs_lb) +// // out.env1(R2) = vs_R2.meet(vs_ub) +// // out +// case bitVecLiteral: BitVecLiteral => +// ??? +// // val R1 = localAssign.lhs +// // val c = bitVecLiteral +// // // R1 <= c +// // // from 0 to c, all value sets are possible (ie. stack, global) TODO: this may be wrong because of the _ join _? 
+// // val interval = bitVec_interval(BitVecLiteral(0, c.size), c, BitVecLiteral(1, c.size)) +// // val regions: mutable.Set[MemoryRegion] = mutable.Set() +// // println(c) +// // interval.foreach(v => +// // val dataObject = mmm.findDataObject(v.value) +// // if dataObject.isDefined then regions.add(dataObject.get) +// // ) +// // TOP_STRIDE.gamma.map(v => regions.add(mmm.findStackObject(v.value).get)) +// // +// // val allValueSets: mutable.Set[ValueSet] = mutable.Set() +// // regions.foreach(r => allValueSets.add(in.env2(r).getAAlloc(r).valueSet)) +// // val vs_c = allValueSets.fold(ValueSetLattice.BOTTOM)(_ join _) +// // val out = in +// // out.env1(R1) = in.env1(R1).meet(vs_c) +// // out +// +// // val vs_c = ValueSet(Set(StridedInterval(smt_gcd(BitVecLiteral(BigInt(0), c.size), c), BitVecLiteral(BigInt(0), c.size), c))) // TODO: Fix ME +// // val out = in +// // out.env1(R1) = in.env1(R1).meet(vs_c) +// // out +// case _ => in // TODO: Handle other cases +// case memoryAssign: MemoryAssign => in // TODO: *(R1 + c1) = R2 + c2 +// case nop: NOP => in +// case assert: Assert => in +// case assume: Assume => in +// case jump: Jump => in +// } +// } +// +// def IntraProceduralVSA(): mutable.Map[CFGPosition, AbsEnv] = { +// val worklist = new mutable.Queue[CFGPosition]() +// worklist.enqueue(program.mainProcedure) +// +// val absEnv_enter = AbsEnv() +// val abstractStates = mutable.Map[CFGPosition, AbsEnv](worklist.head -> absEnv_enter) +// while(worklist.nonEmpty) { +// val n: CFGPosition = worklist.dequeue() +// val m = IntraProcIRCursor.succ(n) +// for (succ <- m) { +// mmm.popContext() +// mmm.pushContext(IRWalk.procedure(n).name) +// val edge_amc = AbstractTransformer(abstractStates(n), succ) +// Propagate(succ, edge_amc) +// } +// } +// +// def Propagate(n: CFGPosition, edge_amc: AbsEnv): Unit = { +// if (!abstractStates.contains(n)) { +// abstractStates(n) = edge_amc +// worklist.enqueue(n) +// } else { +// val oldEnv = abstractStates(n) +// val newEnv = oldEnv.join(edge_amc) +// if (newEnv != oldEnv) { +// abstractStates(n) = newEnv +// worklist.enqueue(n) +// } +// } +// } +// abstractStates +// } +//} diff --git a/src/main/scala/analysis/Lattice.scala b/src/main/scala/analysis/Lattice.scala index 5c3ccd630..34dc8f43a 100644 --- a/src/main/scala/analysis/Lattice.scala +++ b/src/main/scala/analysis/Lattice.scala @@ -1,8 +1,9 @@ package analysis import ir._ -import analysis.BitVectorEval +import analysis.BitVectorEval._ import util.Logger +import math.pow /** Basic lattice */ @@ -25,6 +26,794 @@ trait Lattice[T]: */ def leq(x: T, y: T): Boolean = lub(x, y) == y // rarely used, but easy to implement :-) +//trait StridedInterval[+T] +// +//case class SI[T](s: T, l: T, u: T) extends StridedInterval[T] { +// override def toString = s"SI $s [$l, $u]" +//} +// +//case object SIBottom extends StridedInterval[BitVecLiteral] { +// override def toString = "SIBot" +//} + +///** +// * SI class that represents a strided interval +// * s is the stride +// * l is the lower bound +// * u is the upper bound +// * [l, u] is the interval +// * [l, u] \ s is the set of values +// * 0[l,l] represents the singleton set {l} +// */ +//class StridedIntervalLattice extends Lattice[StridedInterval[BitVecLiteral]] { +// val lowestPossibleValue: BitVecLiteral = BitVecLiteral(0, 64) +// val highestPossibleValue: BitVecLiteral = BitVecLiteral(Long.MaxValue - 1, 64) +// +// override val bottom: StridedInterval[BitVecLiteral] = SIBottom +// override def top: StridedInterval[BitVecLiteral] = SI(BitVecLiteral(1, 
64), lowestPossibleValue, highestPossibleValue) +// +// def gamma(x: StridedInterval[BitVecLiteral]): Set[BitVecLiteral] = x match { +// case SIBottom => Set.empty +// case SI(s, l, u) => +// bitVec_interval(l, u, s) +// } +// +// /** S1[L1, U1] join S2[L2, U2] -> gcd(S1, S2)[min(L1, L2), max(U1, U2)] */ +// override def lub(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// (x, y) match { +// case (SIBottom, t) => t +// case (t, SIBottom) => t +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// SI(bitVec_gcd(s1, s2), bitVec_min(l1, l2), bitVec_max(u1, u2)) +// } +// } +// +// /** S1[L1, U1] meet S2[L2, U2] -> gcd(S1, S2)[max(L1, L2), min(U1, U2)] */ +// def meet(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// (x, y) match { +// case (SIBottom, t) => SIBottom +// case (t, SIBottom) => SIBottom +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// SI(bitVec_gcd(s1, s2), bitVec_max(l1, l2), bitVec_min(u1, u2)) +// } +// } +// +// /** Addition +// * Addition defined in page 6 Figure 2 of: https://dl.acm.org/doi/pdf/10.1145/1111542.1111560 +// * */ +// def add(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// (x, y) match { +// case (SIBottom, t) => t +// case (t, SIBottom) => t +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// val lbound = smt_bvadd(l1, l2) +// val ubound = smt_bvadd(u1, u2) +// val s = bitVec_gcd(s1, s2) +// if (smt_bvsle(ubound, highestPossibleValue) == TrueLiteral && smt_bvsge(lbound, lowestPossibleValue) == TrueLiteral) { +// SI(s, lbound, ubound) +// } else { +// throw new IllegalArgumentException(s"Addition overflow: $lbound, $ubound") +// } +// } +// } +// +// /** Unary Minus */ +// def unaryMinus(x: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// x match { +// case SIBottom => SIBottom +// case SI(s, l, u) => +// if (smt_bvcomp(l, u) == BitVecLiteral(1, 1) && (smt_bvcomp(l, lowestPossibleValue) == BitVecLiteral(1, 1) && smt_bvcomp(u, lowestPossibleValue) == BitVecLiteral(1, 1))) { +// SI(BitVecLiteral(0, 64), lowestPossibleValue, lowestPossibleValue) +// } else if (smt_bvcomp(l, lowestPossibleValue) == BitVecLiteral(0, 1)) { +// SI(s, smt_bvneg(u), smt_bvneg(l)) +// } +// else { +// SI(BitVecLiteral(1, 64), lowestPossibleValue, highestPossibleValue) +// } +// } +// } +// +// /** Substraction */ +// def sub(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// add(x, unaryMinus(y)) +// } +// +// /** Widen */ +// def widen(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// /* formula for widening: +// given: s1[lb1, ub1] and s2[lb2, ub2] +// then: gcd(s1, s2)[min(lb1, lb2), max(ub1, ub2)] +// where: min(lb1, lb2) = lb1 if lb1 <= lb2 +// and: min(lb1, lb2) = minPossibleValue otherwise +// where: max(ub1, ub2) = ub1 if ub1 >= ub2 +// and: max(ub1, ub2) = maxPossibleValue otherwise +// +// assuming: +// minPossibleValue = lowestPossibleValue +// maxPossibleValue = highestPossibleValue + (lb - 1) mod s +// */ +// (x, y) match { +// case (SIBottom, t) => ??? +// case (t, SIBottom) => ??? 
+// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// val s = bitVec_gcd(s1, s2) +// val l = if (smt_bvule(l1, l2) == TrueLiteral) l1 else lowestPossibleValue +// val u = if (smt_bvuge(u1, u2) == TrueLiteral) u1 else smt_bvsmod(smt_bvadd(highestPossibleValue, smt_bvsub(l1, BitVecLiteral(1, 64))), s) +// SI(s, l, u) +// } +// } +// +// /** +// * Calculating strided interval for a list of values using accumulative gcd. +// * @param x the list of values +// * @return the strided interval representing the values in the list +// */ +// def valuesToSI(x: List[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// if (x.isEmpty) { +// SIBottom +// } else { +// val l = bitVec_min(x) +// val u = bitVec_max(x) +// val initialStride = smt_bvsub(u, l) +// val stride = x.foldLeft(initialStride) { +// case (acc, v) => bitVec_gcd(smt_bvsub(v, l), acc) +// } +// SI(stride, l, u) +// } +// } +//} + + +trait StridedWrappedInterval + +case class SI(s: BigInt, l: BigInt, u: BigInt, w: BigInt) extends StridedWrappedInterval { + if (l == u) { + require(s == 0) + } + override def toString = s"SASI $s [$l, $u] $w" +} + +case object SIBottom extends StridedWrappedInterval { + override def toString = "SASIBot" +} + +// TOP is 1[0^w, 1^w]w +case object SITop extends StridedWrappedInterval { + override def toString = "SASITop" +} + +class SASILattice extends Lattice[StridedWrappedInterval] { + val lowestPossibleValue: BigInt = 0 + val highestPossibleValue: BigInt = Long.MaxValue - 1 + + override val bottom: StridedWrappedInterval = SIBottom + + override def top: StridedWrappedInterval = SITop + +// def gamma(x: StridedWrappedInterval): Set[BitVecLiteral] = x match { +// case SIBottom => Set.empty +// case SI(s, l, u, w) => +// if (s == BitVecLiteral(0, 64)) { // singleton set +// Set(l) +// } else { +// bitVec_interval(l, u, s) +// } +// } + + def isSingleValue(x: StridedWrappedInterval): Boolean = x match { + case SI(s, l, u, w) => s == 0 && l == u + case _ => false + } + + def modularPlus(a: BigInt, b: BigInt, w: BigInt): BigInt = { + (a + b) mod BigInt(2).pow(w.toInt) + } + + def modularMinus(a: BigInt, b: BigInt, w: BigInt): BigInt = { + (a - b) mod BigInt(2).pow(w.toInt) + } + + def modularLEQ(a: BigInt, b: BigInt, x: BigInt, w: BigInt): Boolean = { + modularMinus(a, x, w) <= modularMinus(b, x, w) + } + + def membershipFunction(v: BigInt, r: StridedWrappedInterval): Boolean = { + r match { + case SIBottom => false + case SITop => true + case SI(sr, lb, ub, w) => + modularLEQ(v, ub, lb, w) && (modularMinus(v, lb, w) mod sr) == 0 + } + } + + def cardinalityFunction(r: StridedWrappedInterval, w: BigInt): BigInt = { + r match { + case SIBottom => 0 + case SITop => BigInt(2).pow(w.toInt) + case SI(sr, lb, ub, w) => ((ub - lb + 1) / sr) // TODO: this may need to be a math.floor operation + } + } + + def orderingOperator(r: StridedWrappedInterval, t: StridedWrappedInterval): Boolean = { + if (r == SITop && t != SITop) { + false + } else if (r == SIBottom || t == SITop) { + true + } else { + (r, t) match { + case (SI(sr, a, b, w1), SI(st, c, d, w2)) => + if ((a == c) && (b == d) && ((sr mod st) == 0)) { + return true + } + membershipFunction(a, t) && membershipFunction(b, t) && (!membershipFunction(c, r) || !membershipFunction(d, r)) && ((a - c) mod st) == 0 && (sr mod st) == 0 + case _ => false + } + } + } + + /** S1[L1, U1] join S2[L2, U2] -> gcd(S1, S2)[min(L1, L2), max(U1, U2)] */ + override def lub(r: StridedWrappedInterval, t: StridedWrappedInterval): StridedWrappedInterval = { +// (s, t) match { +// case (SIBottom, 
t) => t +// case (t, SIBottom) => t +// case (SI(a, b, u1, w1), SI(s2, c, d, w2)) => +// var u: BigInt = 0 +// var l: BigInt = 0 +// if (isSingleValue(s) && isSingleValue(t)) { +// val si1_card = WCardMod() +// val si2_card = WCardMod() +// if (si1_card <= si2_card) { +// l = a +// u = d +// } else { +// l = c +// u = b +// } +// +// SI(u - l, l, u, ) +// } +// } + + (r, t) match { + case (SI(sr, a, b, w1), SI(st, c, d, w2)) => + assert(w1 == w2) + val w = w1 // TODO: should this be the largest? + if (orderingOperator(r, t)) { + return t + } + if (orderingOperator(t, r)) { + return r + } + if (membershipFunction(a, t) && membershipFunction(b, t) && membershipFunction(c, r) && membershipFunction(d, r)) { + return SITop + } + if (membershipFunction(c, r) && membershipFunction(b, t) && !membershipFunction(a, t) && !membershipFunction(d, r)) { + return SI(sr.gcd(st).gcd(modularMinus(d, a, w)), a, d, w) + } + if (membershipFunction(a, t) && membershipFunction(d, r) && !membershipFunction(c, r) && !membershipFunction(b, t)) { + return SI(sr.gcd(st).gcd(modularMinus(b, c, w)), c, b, w) + } + val sad = SI(sr.gcd(st).gcd(modularMinus(d, a, w)), a, d, w) + val scb = SI(sr.gcd(st).gcd(modularMinus(b, c, w)), c, b, w) + if (!membershipFunction(a, t) && !membershipFunction(d, r) && !membershipFunction(c, r) && !membershipFunction(b, t) && cardinalityFunction(sad, w) <= cardinalityFunction(scb, w)) { + return sad + } + return scb + case _ => ??? + } + } + + def singletonSI(v: BigInt, w: BigInt): StridedWrappedInterval = { + SI(0, v, v, w) + } + + /** + * s + t = + * BOT if s = BOT or t = BOT + * gcd(s, t)(|a +w c, b +w d|) if s = (|a, b|), t = (|c, d|) and #s + #t <= 2^w + * @param s + * @param t + * @return + */ + def add(s: StridedWrappedInterval, t: StridedWrappedInterval): StridedWrappedInterval = { + (s, t) match { + case (SIBottom, _) => SIBottom // TODO: is this correct? + case (_, SIBottom) => SIBottom // TODO: is this correct? + case (SI(ss, a, b, w1), SI(st, c, d, w2)) if (cardinalityFunction(s, w1) + cardinalityFunction(t, w2)) <= BigInt(2).pow(w1.toInt) => + assert(w1 == w2) + return SI(ss.gcd(st), modularPlus(a, c, w1), modularPlus(b, d, w1), w1) + case _ => SITop + } + } + + def add(s: StridedWrappedInterval, t: BigInt, w: BigInt): StridedWrappedInterval = { + (s, t) match { + case (SIBottom, _) => SIBottom // TODO: is this correct? + case (SI(ss, a, b, w1), t) => + return add(s, singletonSI(t, w)) + case _ => SITop + } + } + + def sub(s: StridedWrappedInterval, t: StridedWrappedInterval): StridedWrappedInterval = { + (s, t) match { + case (SIBottom, _) => SIBottom // TODO: is this correct? + case (_, SIBottom) => SIBottom // TODO: is this correct? + case (SI(ss, a, b, w1), SI(st, c, d, w2)) if (cardinalityFunction(s, w1) + cardinalityFunction(t, w2)) <= BigInt(2).pow(w1.toInt) => + assert(w1 == w2) + return SI(ss.gcd(st), modularMinus(a, d, w1), modularMinus(b, c, w1), w1) + case _ => SITop + } + } + + def sub(s: StridedWrappedInterval, t: BigInt, w: BigInt): StridedWrappedInterval = { + (s, t) match { + case (SIBottom, _) => SIBottom // TODO: is this correct? 
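+      // A constant operand is handled by lifting it to the singleton interval singletonSI(t, w)
+      // and reusing the interval-interval sub above. Note that singletons have stride 0, and the
+      // two-interval add/sub overloads guard on cardinalityFunction, which divides by the stride,
+      // so this path can hit a divide-by-zero; treating singleton cardinality as 1 would avoid that.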
+ case (SI(ss, a, b, w1), t) => + return sub(s, singletonSI(t, w)) + case _ => SITop + } + } + + +// /** S1[L1, U1] meet S2[L2, U2] -> gcd(S1, S2)[max(L1, L2), min(U1, U2)] */ +// def meet(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// (x, y) match { +// case (SIBottom, t) => SIBottom +// case (t, SIBottom) => SIBottom +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// SI(bitVec_gcd(s1, s2), bitVec_max(l1, l2), bitVec_min(u1, u2)) +// } +// } +// +// /** Addition +// * Addition defined in page 6 Figure 2 of: https://dl.acm.org/doi/pdf/10.1145/1111542.1111560 +// * */ +// def add(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// (x, y) match { +// case (SIBottom, t) => t +// case (t, SIBottom) => t +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// val lbound = smt_bvadd(l1, l2) +// val ubound = smt_bvadd(u1, u2) +// val s = bitVec_gcd(s1, s2) +// if (smt_bvsle(ubound, highestPossibleValue) == TrueLiteral && smt_bvsge(lbound, lowestPossibleValue) == TrueLiteral) { +// SI(s, lbound, ubound) +// } else { +// throw new IllegalArgumentException(s"Addition overflow: $lbound, $ubound") +// } +// } +// } +// +// /** Unary Minus */ +// def unaryMinus(x: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// x match { +// case SIBottom => SIBottom +// case SI(s, l, u) => +// if (smt_bvcomp(l, u) == BitVecLiteral(1, 1) && (smt_bvcomp(l, lowestPossibleValue) == BitVecLiteral(1, 1) && smt_bvcomp(u, lowestPossibleValue) == BitVecLiteral(1, 1))) { +// SI(BitVecLiteral(0, 64), lowestPossibleValue, lowestPossibleValue) +// } else if (smt_bvcomp(l, lowestPossibleValue) == BitVecLiteral(0, 1)) { +// SI(s, smt_bvneg(u), smt_bvneg(l)) +// } +// else { +// SI(BitVecLiteral(1, 64), lowestPossibleValue, highestPossibleValue) +// } +// } +// } +// +// /** Substraction */ +// def sub(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// add(x, unaryMinus(y)) +// } +// +// /** Widen */ +// def widen(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// /* formula for widening: +// given: s1[lb1, ub1] and s2[lb2, ub2] +// then: gcd(s1, s2)[min(lb1, lb2), max(ub1, ub2)] +// where: min(lb1, lb2) = lb1 if lb1 <= lb2 +// and: min(lb1, lb2) = minPossibleValue otherwise +// where: max(ub1, ub2) = ub1 if ub1 >= ub2 +// and: max(ub1, ub2) = maxPossibleValue otherwise +// +// assuming: +// minPossibleValue = lowestPossibleValue +// maxPossibleValue = highestPossibleValue + (lb - 1) mod s +// */ +// (x, y) match { +// case (SIBottom, t) => ??? +// case (t, SIBottom) => ??? +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// val s = bitVec_gcd(s1, s2) +// val l = if (smt_bvule(l1, l2) == TrueLiteral) l1 else lowestPossibleValue +// val u = if (smt_bvuge(u1, u2) == TrueLiteral) u1 else smt_bvsmod(smt_bvadd(highestPossibleValue, smt_bvsub(l1, BitVecLiteral(1, 64))), s) +// SI(s, l, u) +// } +// } +// +// /** +// * Calculating strided interval for a list of values using accumulative gcd. 
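+//   * For example, valuesToSI(List(4, 8, 14)) (as 64-bit literals) gives SI(2, 4, 14),
+//   * since the accumulated gcd of the offsets from the minimum (0, 4, 10) is 2.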
+// * +// * @param x the list of values +// * @return the strided interval representing the values in the list +// */ +// def valuesToSI(x: List[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// if (x.isEmpty) { +// SIBottom +// } else { +// val l = bitVec_min(x) +// val u = bitVec_max(x) +// val initialStride = smt_bvsub(u, l) +// val stride = x.foldLeft(initialStride) { +// case (acc, v) => bitVec_gcd(smt_bvsub(v, l), acc) +// } +// SI(stride, l, u) +// } +// } +} + +sealed trait ValueSet[T] + +case class VS[T](m: Map[T, StridedWrappedInterval]) extends ValueSet[T] { + override def toString: String = m.toString +} + +/** The lattice of integers with the standard ordering. + */ +class ValueSetLattice[T] extends Lattice[ValueSet[T]] { + + case object VSBottom extends ValueSet[T] { + override def toString = "VSBot" + } + + case object VSTop extends ValueSet[T] { + override def toString = "VSTop" + } + + override val bottom: ValueSet[T] = VSBottom + override def top: ValueSet[T] = VSTop + + val lattice: SASILattice = SASILattice() + + override def lub(x: ValueSet[T], y: ValueSet[T]): ValueSet[T] = { + (x, y) match { + case (VSBottom, t) => t + case (t, VSBottom) => t + case (VSTop, _) => VSTop + case (_, VSTop) => VSTop + case (VS(m1), VS(m2)) => + VS(m1.keys.foldLeft(m2) { + case (acc, k) => + val v1 = m1(k) + val v2 = m2(k) + acc + (k -> lattice.lub(v1, v2)) + }) + } + } + +// def meet(x: ValueSet[String], y: ValueSet[String]): ValueSet[String] = { +// (x, y) match { +// case (VSBottom, t) => VSBottom +// case (t, VSBottom) => VSBottom +// case (VSTop, _) => y +// case (_, VSTop) => x +// case (VS(m1), VS(m2)) => +// VS(m1.keys.foldLeft(m2) { +// case (acc, k) => +// val v1 = m1(k) +// val v2 = m2(k) +// acc + (k -> lattice.meet(v1, v2)) +// }) +// } +// } + + def applyOp(op: BinOp, lhs: ValueSet[T], rhs: Either[ValueSet[T], BitVecLiteral]): ValueSet[T] = { + op match + case bvOp: BVBinOp => + bvOp match + case BVAND => ??? + case BVOR => ??? + case BVADD => rhs match + case Left(vs) => add(lhs, vs) + case Right(bitVecLiteral) => add(lhs, bitVecLiteral) + case BVMUL => ??? + case BVUDIV => ??? + case BVUREM => ??? + case BVSHL => ??? + case BVLSHR => ??? + case BVULT => ??? + case BVNAND => ??? + case BVNOR => ??? + case BVXOR => ??? + case BVXNOR => ??? + case BVCOMP => ??? + case BVSUB => rhs match + case Left(vs) => sub(lhs, vs) + case Right(bitVecLiteral) => sub(lhs, bitVecLiteral) + case BVSDIV => ??? + case BVSREM => ??? + case BVSMOD => ??? + case BVASHR => ??? + case BVULE => ??? + case BVUGT => ??? + case BVUGE => ??? + case BVSLT => ??? + case BVSLE => ??? + case BVSGT => ??? + case BVSGE => ??? + case BVEQ => ??? + case BVNEQ => ??? + case BVCONCAT => ??? + case boolOp: BoolBinOp => + boolOp match + case BoolEQ => applyOp(BVEQ, lhs, rhs) + case BoolNEQ => applyOp(BVNEQ, lhs, rhs) + case BoolAND => applyOp(BVAND, lhs, rhs) + case BoolOR => applyOp(BVOR, lhs, rhs) + case BoolIMPLIES => ??? + case BoolEQUIV => ??? + case intOp: IntBinOp => + applyOp(intOp.toBV, lhs, rhs) + case _ => ??? + } + + def applyOp(op: UnOp, rhs: ValueSet[T]): ValueSet[T] = { + op match + case bvOp: BVUnOp => + bvOp match + case BVNOT => ??? + case BVNEG => ??? + case boolOp: BoolUnOp => + boolOp match + case BoolNOT => ??? + case intOp: IntUnOp => + applyOp(intOp.toBV, rhs) + case _ => ??? 
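+      // Every unary case above (BVNOT, BVNEG, BoolNOT) is still a ??? stub, so applyOp on a UnOp
+      // currently throws NotImplementedError; IntUnOps are converted via toBV and then hit the same stubs.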
+ } + + def add(x: ValueSet[T], y: ValueSet[T]): ValueSet[T] = { + (x, y) match { + case (VSBottom, t) => t + case (t, VSBottom) => t + case (VSTop, _) => VSTop + case (_, VSTop) => VSTop + case (VS(m1), VS(m2)) => + VS(m1.keys.foldLeft(m2) { + case (acc, k) => + val v1 = m1(k) + val v2 = m2(k) + acc + (k -> lattice.add(v1, v2)) + }) + } + } + + def add(x: ValueSet[T], y: BitVecLiteral): ValueSet[T] = { + x match { + case VSBottom => VSBottom + case VSTop => VSTop + case VS(m) => + VS(m.map { + case (k, s) => k -> lattice.add(s, y.value, y.size) // TODO: is the size correct here? + }) + } + } + + def sub(x: ValueSet[T], y: ValueSet[T]): ValueSet[T] = { + (x, y) match { + case (VSBottom, t) => VSBottom + case (t, VSBottom) => t + case (VS(m1), VS(m2)) => + VS(m1.keys.foldLeft(m2) { + case (acc, k) => + val v1 = m1(k) + val v2 = m2(k) + acc + (k -> lattice.sub(v1, v2)) + }) + } + } + + def sub(x: ValueSet[T], y: BitVecLiteral): ValueSet[T] = { + x match { + case VSBottom => VSBottom + case VS(m) => + VS(m.map { + case (k, s) => k -> lattice.sub(s, y.value, y.size) // TODO: is the size correct here? + }) + } + } + +// def widen(vs1: ValueSet[T], vs2: ValueSet[T]): ValueSet[T] = { +// (vs1, vs2) match { +// case (VSBottom, t) => ??? +// case (t, VSBottom) => ??? +// case (VSTop, _) => VSTop +// case (_, VSTop) => VSTop +// case (VS(m1), VS(m2)) => +// VS(m1.keys.foldLeft(m2) { +// case (acc, k) => +// val v1 = m1(k) +// val v2 = m2(k) +// acc + (k -> lattice.widen(v1, v2)) +// }) +// } +// } + + def removeLowerBounds(vs: ValueSet[T]): ValueSet[T] = { + vs match { + case VSBottom => VSBottom + case VSTop => VSTop + case VS(m) => + VS(m.map { + case (k, SI(s, l, u, w)) => k -> SI(s, lattice.lowestPossibleValue, u, w) + }) + } + } + + def removeUpperBound(vs: ValueSet[T]): ValueSet[T] = { + vs match { + case VSBottom => VSBottom + case VSTop => VSTop + case VS(m) => + VS(m.map { + case (k, SI(s, l, u, w)) => k -> SI(s, l, lattice.highestPossibleValue, w) + }) + } + } + + extension (r: DataRegion | StackRegion) + def start: BigInt = r match { + case d: DataRegion => d.start.value + case s: StackRegion => s.start.value + } + + def end(mmm: MemoryModelMap): BigInt = r match { + case d: DataRegion => mmm.getEnd(d) + case s: StackRegion => mmm.getEnd(s) + } + + /** + * ∗(vs, s): Returns a pair of sets (F, P). F represents the set of “fully accessed” a-locs: it + * consists of the a-locs that are of size s and whose starting addresses are in vs. P represents + * the set of “partially accessed” a-locs: it consists of (i) a-locs whose starting addresses are in + * vs but are not of size s, and (ii) a-locs whose addresses are in vs but whose starting addresses + * and sizes do not meet the conditions to be in F. [Reference VSA paper] + * + * @param vsR2 + * @param s size of the dereference + * @return + */ + def dereference(s: BigInt, vs: ValueSet[String], mmm: MemoryModelMap): (Set[MemoryRegion], Set[MemoryRegion]) = { + var fullyAccessedLocations = Set[MemoryRegion]() + var partiallyAccessedLocations = Set[MemoryRegion]() + vs match { + case VSBottom => VSBottom + case VSTop => ??? //TODO: should this return everything? 
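+      // Re the TODO: a conservative (not yet implemented) choice would be to treat VSTop as every
+      // region mmm knows about, returning them all as partially accessed.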
+ case VS(m) => + for (elem <- m) { + if (elem._2 != lattice.bottom) { // region SI defined + elem._2 match { + case SI(stride, lower, upper, w) => + // TODO: Global memory size can be retrieved from the symbol table and are of size s + // Map addresses to exact memory locations + val regionsWithSize = mmm.getRegionsWithSize(s, elem._1) + fullyAccessedLocations = fullyAccessedLocations ++ regionsWithSize.filter(region => (region.isInstanceOf[DataRegion | StackRegion] && region.asInstanceOf[DataRegion | StackRegion].start >= lower && region.asInstanceOf[DataRegion | StackRegion].start <= upper) || region.isInstanceOf[HeapRegion]) // TODO: THIS IS NOT THE RIGHT WAY TO CHECK IF ADDRESS IS IN SI + + // Identify partially accessed locations (if any) + val notOfSize = mmm.getRegionsWithSize(s, elem._1, true) + // start or end must be in SI + partiallyAccessedLocations = partiallyAccessedLocations ++ regionsWithSize.filter(region => region.asInstanceOf[DataRegion | StackRegion].start >= lower && region.asInstanceOf[DataRegion | StackRegion].start <= upper || ((region.asInstanceOf[DataRegion | StackRegion].end(mmm) >= lower) && region.asInstanceOf[DataRegion | StackRegion].end(mmm) <= upper)) + case _ => ??? + } + } + } + } + // Return the set of fully accessed locations and the set of partially accessed locations + (fullyAccessedLocations, partiallyAccessedLocations) + } +} + +trait Bool3 + +case object BOTTOM_BOOL3 extends Bool3 { + override def toString = "BOTTOM" +} + +case object FALSE_BOOL3 extends Bool3 { + override def toString = "FALSE" +} + +case object TURE_BOOL3 extends Bool3 { + override def toString = "TRUE" +} + +case object MAYBE_BOOL3 extends Bool3 { + override def toString = "MAYBE" +} + +/** The lattice of booleans with the standard ordering. + */ +class Bool3Lattice extends Lattice[Bool3] { + + override val bottom: Bool3 = BOTTOM_BOOL3 + + override def top: Bool3 = MAYBE_BOOL3 + + override def lub(x: Bool3, y: Bool3): Bool3 = { + (x, y) match { + case (BOTTOM_BOOL3, t) => t + case (t, BOTTOM_BOOL3) => t + case (TURE_BOOL3, FALSE_BOOL3) => MAYBE_BOOL3 + case (FALSE_BOOL3, TURE_BOOL3) => MAYBE_BOOL3 + case _ => x + } + } +} + +enum Flags { + case CF // Carry Flag + case ZF // Zero Flag + case SF // Sign Flag + case PF // Parity Flag + case AF // Auxiliary Flag + case OF // Overflow Flag +} + +/** + * case CF // Carry Flag + * case ZF // Zero Flag + * case SF // Sign Flag + * case PF // Parity Flag + * case AF // Auxiliary Flag + * case OF // Overflow Flag + */ +trait Flag + +case object BOTTOM_Flag extends Flag { + override def toString = "BOTTOM_FLAG" +} + +case class FlagMap(m: Map[Flags, Bool3]) extends Flag { + override def toString: String = m.toString +} + + +/** The lattice of booleans with the standard ordering. 
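+ * Concretely, this is the lattice of flag maps: each of CF, ZF, SF, PF, AF and OF is mapped to a
+ * Bool3 value, and maps are joined pointwise via Bool3Lattice.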
+ */ +class FlagLattice extends Lattice[Flag] { + + override val bottom: Flag = BOTTOM_Flag + + override def top: Flag = FlagMap(Map( + Flags.CF -> MAYBE_BOOL3, + Flags.ZF -> MAYBE_BOOL3, + Flags.SF -> MAYBE_BOOL3, + Flags.PF -> MAYBE_BOOL3, + Flags.AF -> MAYBE_BOOL3, + Flags.OF -> MAYBE_BOOL3 + )) + + val lattice: Bool3Lattice = Bool3Lattice() + + override def lub(x: Flag, y: Flag): Flag = { + (x, y) match { + case (BOTTOM_Flag, t) => t + case (t, BOTTOM_Flag) => t + case (FlagMap(m1), FlagMap(m2)) => + FlagMap(m1.keys.foldLeft(m2) { + case (acc, k) => + val v1 = m1(k) + val v2 = m2(k) + acc + (k -> lattice.lub(v1, v2)) + }) + } + } + + def setFlag(flag: Flags, value: Bool3): Flag = { + FlagMap(Map(flag -> value)) + } +} + + /** The powerset lattice of a set of elements of type `A` with subset ordering. */ class PowersetLattice[A] extends Lattice[Set[A]] { diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index 2fa9b841e..212ac951a 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -11,14 +11,15 @@ trait MemoryRegionAnalysis(val program: Program, val globals: Map[BigInt, String], val globalOffsets: Map[BigInt, BigInt], val subroutines: Map[BigInt, String], - val constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + val constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], val ANRResult: Map[CFGPosition, Set[Variable]], val RNAResult: Map[CFGPosition, Set[Variable]], - val regionAccesses: Map[CfgNode, Map[RegisterVariableWrapper, FlatElement[Expr]]], + val regionAccesses: Map[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]]], val reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], - val maxDepth: Int) { + val maxDepth: Int, + val exactMatch: Boolean) { - var mallocCount: Int = 0 + private var mallocCount: Int = 0 private var stackCount: Int = 0 val stackMap: mutable.Map[Procedure, mutable.Map[Expr, StackRegion]] = mutable.Map() @@ -89,7 +90,7 @@ trait MemoryRegionAnalysis(val program: Program, private val spList = ListBuffer[Expr](stackPointer) private val ignoreRegions: Set[Expr] = Set(linkRegister, framePointer) // TODO: this could be used instead of regionAccesses in other analyses to reduce the Expr to region conversion - private val registerToRegions: mutable.Map[RegisterVariableWrapper, mutable.Set[MemoryRegion]] = mutable.Map() + private val registerToRegions: mutable.Map[RegisterWrapperPartialEquality, mutable.Set[MemoryRegion]] = mutable.Map() val procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]] = mutable.Map() var depthMap: mutable.Map[CFGPosition, Int] = mutable.Map() @@ -118,19 +119,21 @@ trait MemoryRegionAnalysis(val program: Program, println(s"Unreducible: $i") eval(i.rhs, Set.empty, i) } - evaluateExpression(binExpr.arg2, constantProp(n)) match { - case Some(b: BitVecLiteral) => - regions.foreach { - case stackRegion: StackRegion => - val nextOffset = BinaryExpr(binExpr.op, stackRegion.start, b) - evaluateExpression(nextOffset, constantProp(n)) match { - case Some(b2: BitVecLiteral) => - reducedRegions = reducedRegions + poolMaster(b2, IRWalk.procedure(n)) - case None => - } - case _ => - } - case None => + for (elem <- evaluateExpressionWithSSA(binExpr.arg2, constantProp(n), n, reachingDefs, exactMatch)) { + elem match { + case b: BitVecLiteral => + regions.foreach { + case stackRegion: 
StackRegion => + val nextOffset = BinaryExpr(binExpr.op, stackRegion.start, b) + for (elem <- evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs, exactMatch)) { + elem match { + case b2: BitVecLiteral => + reducedRegions = reducedRegions + poolMaster(b2, IRWalk.procedure(n)) + } + } + case _ => + } + } } } case _ => @@ -140,66 +143,72 @@ trait MemoryRegionAnalysis(val program: Program, def eval(exp: Expr, env: Set[MemoryRegion], n: Command): Set[MemoryRegion] = { println(s"Asked to evaluate: $exp at ${n.label}") + val regionsToReturn = mutable.Set[MemoryRegion]() exp match { case binOp: BinaryExpr => if (spList.contains(binOp.arg1)) { - evaluateExpression(binOp.arg2, constantProp(n)) match { - case Some(b: BitVecLiteral) => Set(poolMaster(b, IRWalk.procedure(n))) - case None => throw RuntimeException(s"This should be reducible: $exp") + for (elem <- evaluateExpressionWithSSA(binOp.arg2, constantProp(n), n, reachingDefs, exactMatch)) { + elem match { + case b: BitVecLiteral => regionsToReturn.addAll(Set(poolMaster(b, IRWalk.procedure(n)))) + } } } else { val reduced = reducibleToRegion(binOp, n) if (reduced.nonEmpty) { println(s"Reducible: exp $exp") - reduced + regionsToReturn.addAll(reduced) } else { - evaluateExpression(binOp, constantProp(n)) match { - case Some(b: BitVecLiteral) => eval(b, env, n) - case None => eval(binOp.arg1, env, n) ++ eval(binOp.arg2, env, n) + val elems = evaluateExpressionWithSSA(binOp, constantProp(n), n, reachingDefs, exactMatch) + for (elem <- elems) { + elem match { + case b: BitVecLiteral => regionsToReturn.addAll(eval(b, env, n)) + } + } + if (elems.isEmpty) { + regionsToReturn.addAll(eval(binOp.arg1, env, n) ++ eval(binOp.arg2, env, n)) } } } case variable: Variable => variable match { case _: LocalVar => - env case reg: Register if spList.contains(reg) => - Set(poolMaster(BitVecLiteral(0, 64), IRWalk.procedure(n))) + regionsToReturn.addAll(Set(poolMaster(BitVecLiteral(0, 64), IRWalk.procedure(n)))) case _ => - evaluateExpression(variable, constantProp(n)) match { - case Some(b: BitVecLiteral) => - eval(b, env, n) - case _ => - env // we cannot evaluate this to a concrete value, we need VSA for this + for (elem <- evaluateExpressionWithSSA(variable, constantProp(n), n, reachingDefs, exactMatch)) { + elem match { + case b: BitVecLiteral => + regionsToReturn.addAll(eval(b, env, n)) + } } } case memoryLoad: MemoryLoad => - eval(memoryLoad.index, env, n) + regionsToReturn.addAll(eval(memoryLoad.index, env, n)) // ignore case where it could be a global region (loaded later in MMM from relf) case b: BitVecLiteral => - env case literal: Literal => // ignore literals other than BitVectors - env case extract: Extract => - eval(extract.body, env, n) + regionsToReturn.addAll(eval(extract.body, env, n)) case repeat: Repeat => - eval(repeat.body, env, n) + regionsToReturn.addAll(eval(repeat.body, env, n)) case zeroExtend: ZeroExtend => - eval(zeroExtend.body, env, n) + regionsToReturn.addAll(eval(zeroExtend.body, env, n)) case signExtend: SignExtend => - eval(signExtend.body, env, n) + regionsToReturn.addAll(eval(signExtend.body, env, n)) case unaryExpr: UnaryExpr => - eval(unaryExpr.arg, env, n) + regionsToReturn.addAll(eval(unaryExpr.arg, env, n)) case memoryStore: MemoryStore => - eval(memoryStore.index, env, n) ++ eval(memoryStore.value, env, n) + regionsToReturn.addAll(eval(memoryStore.index, env, n) ++ eval(memoryStore.value, env, n)) case memory: Memory => - env } + regionsToReturn.toSet } /** Transfer function for state lattice elements. 
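 * (Editor's sketch, hedged: with the accumulator rewrite below, each case folds what it finds into
 * `m` via regionLattice.lub instead of returning early. For a `call malloc` where the SSA constant
 * propagation resolves the size argument in R0 to, say, 0x20, the net effect is roughly
 * {{{
 *   m = regionLattice.lub(m, Set(HeapRegion(nextMallocCount(), BitVecLiteral(0x20, 64), IRWalk.procedure(n))))
 * }}}
 * while an unresolved size now simply leaves `m` unchanged rather than taking a separate None branch.)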
*/ - def localTransfer(n: CFGPosition, s: Set[MemoryRegion]): Set[MemoryRegion] = n match { + def localTransfer(n: CFGPosition, s: Set[MemoryRegion]): Set[MemoryRegion] = + var m = s + n match { case cmd: Command => println(s"N: $n") cmd match { @@ -222,44 +231,72 @@ trait MemoryRegionAnalysis(val program: Program, // } // } if (directCall.target.name == "malloc") { - evaluateExpression(mallocVariable, constantProp(n)) match { - case Some(b: BitVecLiteral) => regionLattice.lub(s, Set(HeapRegion(nextMallocCount(), b, IRWalk.procedure(n)))) - case None => s + for (elem <- evaluateExpressionWithSSA(mallocVariable, constantProp(n), n, reachingDefs, exactMatch)) { + elem match { + case b: BitVecLiteral => m = regionLattice.lub(m, Set(HeapRegion(nextMallocCount(), b, IRWalk.procedure(n)))) + } } - } else { - s } case memAssign: MemoryAssign => - val result = eval(memAssign.rhs.index, s, cmd) - regionLattice.lub(s, result) + val result = eval(memAssign.rhs.index, m, cmd) + m = regionLattice.lub(m, result) case localAssign: LocalAssign => stackDetection(localAssign) - val result = eval(localAssign.rhs, s, cmd) - regionLattice.lub(s, result) - case _ => s + val result = eval(localAssign.rhs, m, cmd) + m = regionLattice.lub(m, result) + case _ => } - case _ => s // ignore other kinds of nodes + case _ => // ignore other kinds of nodes } + m def transferUnlifted(n: CFGPosition, s: Set[MemoryRegion]): Set[MemoryRegion] = localTransfer(n, s) } class MemoryRegionAnalysisSolver( - program: Program, - globals: Map[BigInt, String], - globalOffsets: Map[BigInt, BigInt], - subroutines: Map[BigInt, String], - constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - ANRResult: Map[CFGPosition, Set[Variable]], - RNAResult: Map[CFGPosition, Set[Variable]], - regionAccesses: Map[CfgNode, Map[RegisterVariableWrapper, FlatElement[Expr]]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], - maxDepth: Int - ) extends MemoryRegionAnalysis(program, globals, globalOffsets, subroutines, constantProp, ANRResult, RNAResult, regionAccesses, reachingDefs, maxDepth) + program: Program, + globals: Map[BigInt, String], + globalOffsets: Map[BigInt, BigInt], + subroutines: Map[BigInt, String], + constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], + ANRResult: Map[CFGPosition, Set[Variable]], + RNAResult: Map[CFGPosition, Set[Variable]], + regionAccesses: Map[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]]], + reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], + maxDepth: Int, + exactMatch: Boolean = true + ) extends MemoryRegionAnalysis(program, globals, globalOffsets, subroutines, constantProp, ANRResult, RNAResult, regionAccesses, reachingDefs, maxDepth, exactMatch) with IRIntraproceduralForwardDependencies with Analysis[Map[CFGPosition, LiftedElement[Set[MemoryRegion]]]] with WorklistFixpointSolverWithReachability[CFGPosition, Set[MemoryRegion], PowersetLattice[MemoryRegion]] { + override def funsub(n: CFGPosition, x: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]]): LiftedElement[Set[MemoryRegion]] = { + n match { + // function entry nodes are always reachable as this is intraprocedural + case _: Procedure => liftedLattice.lift(regionLattice.bottom) + // all other nodes are processed with join+transfer + case _ => super.funsub(n, x) + } + } +} + +class InterprocMemoryRegionAnalysisSolver( + program: Program, + globals: Map[BigInt, String], + globalOffsets: 
Map[BigInt, BigInt], + subroutines: Map[BigInt, String], + constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], + ANRResult: Map[CFGPosition, Set[Variable]], + RNAResult: Map[CFGPosition, Set[Variable]], + regionAccesses: Map[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]]], + reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], + maxDepth: Int, + exactMatch: Boolean = false + ) extends MemoryRegionAnalysis(program, globals, globalOffsets, subroutines, constantProp, ANRResult, RNAResult, regionAccesses, reachingDefs, maxDepth, exactMatch) + with IRInterproceduralForwardDependencies + with Analysis[Map[CFGPosition, LiftedElement[Set[MemoryRegion]]]] + with WorklistFixpointSolverWithReachability[CFGPosition, Set[MemoryRegion], PowersetLattice[MemoryRegion]] { + override def funsub(n: CFGPosition, x: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]]): LiftedElement[Set[MemoryRegion]] = { n match { // function entry nodes are always reachable as this is intraprocedural diff --git a/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala b/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala index 93aa9ab43..39e0723a6 100644 --- a/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala +++ b/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala @@ -28,7 +28,7 @@ case class ReachingDefinitionsAnalysis(program: Program) { private def generateUniqueDefinition( variable: Variable ): LocalAssign = { - LocalAssign(variable, BitVecLiteral(0, 0)) + LocalAssign(variable, Register("Unique", BitVecType(64))) } def transfer(n: CFGPosition, s: (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]])): (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]]) = @@ -71,6 +71,15 @@ case class ReachingDefinitionsAnalysis(program: Program) { transformUses(assume.body.variables, s) case indirectCall: IndirectCall => transformUses(indirectCall.target.variables, s) + case directCall: DirectCall if directCall.target.name == "malloc" => + // assume R0 has been assigned, generate a fake definition + val mallocVar = Register("R0", BitVecType(64)) + val mallocDef = generateUniqueDefinition(mallocVar) + val mallocUseDefs: Map[Variable, Set[Definition]] = Set(mallocVar).foldLeft(Map.empty[Variable, Set[Definition]]) { + case (acc, v) => + acc + (v -> s._1(v)) + } + (s._1 + (Register("R0", BitVecType(64)) -> Set(mallocDef)), mallocUseDefs) case _ => s } } @@ -79,3 +88,8 @@ class ReachingDefinitionsAnalysisSolver(program: Program) extends ReachingDefinitionsAnalysis(program) with SimpleWorklistFixpointSolver[CFGPosition, (Map[Variable, Set[ReachingDefinitionsAnalysis#Definition]], Map[Variable, Set[ReachingDefinitionsAnalysis#Definition]]), ReachingDefinitionsAnalysis#TupleElement] with IRIntraproceduralForwardDependencies + +class InterprocReachingDefinitionsAnalysisSolver(program: Program) + extends ReachingDefinitionsAnalysis(program) + with SimpleWorklistFixpointSolver[CFGPosition, (Map[Variable, Set[ReachingDefinitionsAnalysis#Definition]], Map[Variable, Set[ReachingDefinitionsAnalysis#Definition]]), ReachingDefinitionsAnalysis#TupleElement] + with IRInterproceduralForwardDependencies \ No newline at end of file diff --git a/src/main/scala/analysis/RegToMemAnalysis.scala b/src/main/scala/analysis/RegToMemAnalysis.scala index 0f5e5a073..d592baf57 100644 --- a/src/main/scala/analysis/RegToMemAnalysis.scala +++ b/src/main/scala/analysis/RegToMemAnalysis.scala @@ -16,9 +16,9 @@ import 
scala.collection.immutable */ trait RegionAccessesAnalysis(cfg: ProgramCfg, constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])]) { - val mapLattice: MapLattice[RegisterVariableWrapper, FlatElement[Expr], FlatLattice[Expr]] = MapLattice(FlatLattice[_root_.ir.Expr]()) + val mapLattice: MapLattice[RegisterWrapperPartialEquality, FlatElement[Expr], FlatLattice[Expr]] = MapLattice(FlatLattice[_root_.ir.Expr]()) - val lattice: MapLattice[CfgNode, Map[RegisterVariableWrapper, FlatElement[Expr]], MapLattice[RegisterVariableWrapper, FlatElement[Expr], FlatLattice[Expr]]] = MapLattice(mapLattice) + val lattice: MapLattice[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]], MapLattice[RegisterWrapperPartialEquality, FlatElement[Expr], FlatLattice[Expr]]] = MapLattice(mapLattice) val domain: Set[CfgNode] = cfg.nodes.toSet @@ -26,15 +26,15 @@ trait RegionAccessesAnalysis(cfg: ProgramCfg, constantProp: Map[CFGPosition, Map /** Default implementation of eval. */ - def eval(cmd: CfgCommandNode, constants: Map[Variable, FlatElement[BitVecLiteral]], s: Map[RegisterVariableWrapper, FlatElement[Expr]]): Map[RegisterVariableWrapper, FlatElement[Expr]] = { + def eval(cmd: CfgCommandNode, constants: Map[Variable, FlatElement[BitVecLiteral]], s: Map[RegisterWrapperPartialEquality, FlatElement[Expr]]): Map[RegisterWrapperPartialEquality, FlatElement[Expr]] = { cmd.data match { case localAssign: LocalAssign => localAssign.rhs match { case memoryLoad: MemoryLoad => - s + (RegisterVariableWrapper(localAssign.lhs, getDefinition(localAssign.lhs, cmd.data, reachingDefs)) -> FlatEl(memoryLoad)) + s + (RegisterWrapperPartialEquality(localAssign.lhs, getDefinition(localAssign.lhs, cmd.data, reachingDefs)) -> FlatEl(memoryLoad)) case binaryExpr: BinaryExpr => if (evaluateExpression(binaryExpr.arg1, constants).isEmpty) { // approximates Base + Offset - s + (RegisterVariableWrapper(localAssign.lhs, getDefinition(localAssign.lhs, cmd.data, reachingDefs)) -> FlatEl(binaryExpr)) + s + (RegisterWrapperPartialEquality(localAssign.lhs, getDefinition(localAssign.lhs, cmd.data, reachingDefs)) -> FlatEl(binaryExpr)) } else { s } @@ -47,7 +47,7 @@ trait RegionAccessesAnalysis(cfg: ProgramCfg, constantProp: Map[CFGPosition, Map /** Transfer function for state lattice elements. */ - def localTransfer(n: CfgNode, s: Map[RegisterVariableWrapper, FlatElement[Expr]]): Map[RegisterVariableWrapper, FlatElement[Expr]] = n match { + def localTransfer(n: CfgNode, s: Map[RegisterWrapperPartialEquality, FlatElement[Expr]]): Map[RegisterWrapperPartialEquality, FlatElement[Expr]] = n match { case cmd: CfgCommandNode => eval(cmd, constantProp(cmd.data), s) case _ => s // ignore other kinds of nodes @@ -55,7 +55,7 @@ trait RegionAccessesAnalysis(cfg: ProgramCfg, constantProp: Map[CFGPosition, Map /** Transfer function for state lattice elements. 
*/ - def transfer(n: CfgNode, s: Map[RegisterVariableWrapper, FlatElement[Expr]]): Map[RegisterVariableWrapper, FlatElement[Expr]] = localTransfer(n, s) + def transfer(n: CfgNode, s: Map[RegisterWrapperPartialEquality, FlatElement[Expr]]): Map[RegisterWrapperPartialEquality, FlatElement[Expr]] = localTransfer(n, s) } class RegionAccessesAnalysisSolver( @@ -64,6 +64,6 @@ class RegionAccessesAnalysisSolver( reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], ) extends RegionAccessesAnalysis(cfg, constantProp, reachingDefs) with InterproceduralForwardDependencies - with Analysis[Map[CfgNode, Map[RegisterVariableWrapper, FlatElement[Expr]]]] - with SimpleWorklistFixpointSolver[CfgNode, Map[RegisterVariableWrapper, FlatElement[Expr]], MapLattice[RegisterVariableWrapper, FlatElement[Expr], FlatLattice[Expr]]] { + with Analysis[Map[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]]]] + with SimpleWorklistFixpointSolver[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]], MapLattice[RegisterWrapperPartialEquality, FlatElement[Expr], FlatLattice[Expr]]] { } \ No newline at end of file diff --git a/src/main/scala/analysis/UtilMethods.scala b/src/main/scala/analysis/UtilMethods.scala index 12b46463b..ff17475e8 100644 --- a/src/main/scala/analysis/UtilMethods.scala +++ b/src/main/scala/analysis/UtilMethods.scala @@ -55,7 +55,19 @@ def evaluateExpression(exp: Expr, constantPropResult: Map[Variable, FlatElement[ } } -def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapperEqualSets, Set[BitVecLiteral]], n: CFGPosition, reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])]): Set[BitVecLiteral] = { +/** + * Evaluate an expression in a hope of finding bitVector values for a global variable. + * If exactEquality is true, then the evaluation is done with exact equality. + * By default, exactEquality is true. + * Disabling exactEquality will allow for loose (intersection) equality and thus assist with interprocedural analysis. 
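 * A rough illustration of the two lookup modes on the Variable case below, where `v` stands for
 * any register use (editor's sketch; the overlap reading of "partial equality" is inferred from
 * the names, not stated in this patch):
 * {{{
 *   // exactEquality = true: the key must carry exactly the definitions reaching n
 *   constantPropResult(RegisterWrapperEqualSets(v, getUse(v, n, reachingDefs)))
 *   // exactEquality = false: the same map is read through partial-equality keys, so an entry
 *   // whose definition set merely overlaps getUse(v, n, reachingDefs) can still be found
 *   constantPropResult.asInstanceOf[Map[RegisterWrapperPartialEquality, Set[BitVecLiteral]]](
 *     RegisterWrapperPartialEquality(v, getUse(v, n, reachingDefs)))
 * }}}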
+ * @param exp + * @param constantPropResult + * @param n + * @param reachingDefs + * @param exactEquality + * @return + */ +def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapperEqualSets, Set[BitVecLiteral]], n: CFGPosition, reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], exactEquality: Boolean = true): Set[BitVecLiteral] = { def apply(op: (BitVecLiteral, BitVecLiteral) => BitVecLiteral, a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = val res = for { x <- a @@ -72,8 +84,8 @@ def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapper exp match { case binOp: BinaryExpr => - val lhs = evaluateExpressionWithSSA(binOp.arg1, constantPropResult, n, reachingDefs) - val rhs = evaluateExpressionWithSSA(binOp.arg2, constantPropResult, n, reachingDefs) + val lhs = evaluateExpressionWithSSA(binOp.arg1, constantPropResult, n, reachingDefs, exactEquality) + val rhs = evaluateExpressionWithSSA(binOp.arg2, constantPropResult, n, reachingDefs, exactEquality) (lhs, rhs) match { case (l: Set[BitVecLiteral], r: Set[BitVecLiteral]) => @@ -90,7 +102,7 @@ def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapper } } case unaryExpr: UnaryExpr => - val result = evaluateExpressionWithSSA(unaryExpr.arg, constantPropResult, n, reachingDefs) + val result = evaluateExpressionWithSSA(unaryExpr.arg, constantPropResult, n, reachingDefs, exactEquality) unaryExpr.op match { case BVNEG => applySingle(BitVectorEval.smt_bvneg, result) @@ -99,13 +111,19 @@ def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapper case _ => throw new RuntimeException("Unary operation support not implemented: " + unaryExpr.op) } case extend: ZeroExtend => - val result = evaluateExpressionWithSSA(extend.body, constantPropResult, n, reachingDefs) + val result = evaluateExpressionWithSSA(extend.body, constantPropResult, n, reachingDefs, exactEquality) applySingle(BitVectorEval.smt_zero_extend(extend.extension, _: BitVecLiteral), result) + case se: SignExtend => + val result = evaluateExpressionWithSSA(se.body, constantPropResult, n, reachingDefs, exactEquality) + applySingle(BitVectorEval.smt_sign_extend(se.extension, _: BitVecLiteral), result) case e: Extract => - val result = evaluateExpressionWithSSA(e.body, constantPropResult, n, reachingDefs) + val result = evaluateExpressionWithSSA(e.body, constantPropResult, n, reachingDefs, exactEquality) applySingle(BitVectorEval.boogie_extract(e.end, e.start, _: BitVecLiteral), result) case variable: Variable => - constantPropResult(RegisterWrapperEqualSets(variable, getUse(variable, n, reachingDefs))) + if exactEquality then + constantPropResult(RegisterWrapperEqualSets(variable, getUse(variable, n, reachingDefs))) + else + constantPropResult.asInstanceOf[Map[RegisterWrapperPartialEquality, Set[BitVecLiteral]]](RegisterWrapperPartialEquality(variable, getUse(variable, n, reachingDefs))) case b: BitVecLiteral => Set(b) case _ => throw new RuntimeException("ERROR: CASE NOT HANDLED: " + exp + "\n") } diff --git a/src/main/scala/analysis/solvers/AbstractSPAnalysis.scala b/src/main/scala/analysis/solvers/AbstractSPAnalysis.scala new file mode 100644 index 000000000..36efb4a9d --- /dev/null +++ b/src/main/scala/analysis/solvers/AbstractSPAnalysis.scala @@ -0,0 +1,95 @@ +//package analysis.solvers +// +//import ir.* +//import analysis.solvers._ +//import analysis.* +// +//import scala.collection.immutable +//import scala.collection.mutable +// +// 
+//class AbstractSP(val locations: Set[BitVecLiteral], val definitions: Set[LocalAssign]) { +// override def toString: String = "AbstractSP(" + location + ")" +// +// def add(that: BitVecLiteral, definer: Set[LocalAssign]): AbstractSP = { +// val newLocations = locations.map(l => BitVectorEval.smt_bvadd(l, that)) +// AbstractSP(newLocations, definer) +// } +// +// def sub(that: BitVecLiteral, definer: Set[LocalAssign]): AbstractSP = { +// val newLocations = locations.map(l => BitVectorEval.smt_bvsub(l, that)) +// AbstractSP(newLocations, definer) +// } +// +// def union(that: AbstractSP): AbstractSP = { +// AbstractSP(locations ++ that.locations, definitions ++ that.definitions) +// } +//} +// +//class TopAbstractSP extends AbstractSP(Set.empty, Set.empty) { +// override def toString: String = "TopAbstractSP" +//} +// +// +///** +// * Tracks the stack pointer abstractly and offers calculations for the stack pointer. +// * Uses +// */ +//trait AbstractSPAnalysis(program: Program, constantProp: Map[CFGPosition, Map[RegisterWrapperPartialEquality, Set[BitVecLiteral]]]) { +// +// val mapLattice: MapLattice[RegisterWrapperPartialEquality, FlatElement[AbstractSP], FlatLattice[AbstractSP]] = MapLattice(AbstractSPLattice()) +// +// val lattice: MapLattice[CFGPosition, Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]], MapLattice[RegisterWrapperPartialEquality, FlatElement[AbstractSP], FlatLattice[AbstractSP]]] = MapLattice(mapLattice) +// +// val domain: Set[CFGPosition] = Set.empty ++ program +// +// private val stackPointer = Register("R31", BitVecType(64)) +// +// /** Default implementation of eval. +// */ +// def eval(cmd: Command, s: Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]]): Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]] = { +// +// } +// +// /** Transfer function for state lattice elements. +// */ +// def localTransfer(n: CFGPosition, s: Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]]): Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]] = n match { +// case r: Command => +// r match { +// // assignments +// case la: LocalAssign => +// if (la.lhs == stackPointer) { +// val reachingDefs = getDefinition(la.lhs, n, reachingDefs) +// val rhs = eval(la.rhs, s, n, reachingDefs) +// val rhsLocations = rhs.locations +// val rhsDefinitions = rhs.definitions +// val lhs = AbstractSP(rhsLocations, rhsDefinitions) +// s + (la.lhs -> FlatEl(lhs)) +// } else { +// s + (la.lhs -> eval(la.rhs, s)) +// } +// +// val lhsWrappers = s.collect { +// case (k, v) if RegisterWrapperPartialEquality(k.variable, k.assigns) == RegisterWrapperPartialEquality(la.lhs, getDefinition(la.lhs, r, reachingDefs)) => (k, v) +// } +// if (lhsWrappers.nonEmpty) { +// s ++ lhsWrappers.map((k, v) => (RegisterWrapperEqualSets(k.variable, k.assigns ++ getDefinition(la.lhs, r, reachingDefs)), v.union(eval(la.rhs, s, r)))) +// } else { +// s + (RegisterWrapperEqualSets(la.lhs, getDefinition(la.lhs, r, reachingDefs)) -> eval(la.rhs, s, n)) +// } +// // all others: like no-ops +// case _ => s +// } +// case _ => s +// } +// +// /** Transfer function for state lattice elements. 
+// */ +// def transfer(n: CFGPosition, s: Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]]): Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]] = localTransfer(n, s) +//} +// +//class AbstractSPAnalysisSolver(program: Program, constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]]) extends AbstractSPAnalysis(program, constantProp) +// with IRIntraproceduralForwardDependencies +// with Analysis[Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]]] +// with SimpleWorklistFixpointSolver[CFGPosition, Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]], MapLattice[RegisterWrapperPartialEquality, FlatElement[AbstractSP], FlatLattice[AbstractSP]]] { +//} \ No newline at end of file diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index cbb9b1166..c93633e38 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -50,17 +50,17 @@ case class IRContext( /** Stores the results of the static analyses. */ case class StaticAnalysisContext( - cfg: ProgramCfg, - constPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - IRconstPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - memoryRegionResult: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]], - vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]], - interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]], - paramResults: Map[Procedure, Set[Variable]], - steensgaardResults: Map[RegisterVariableWrapper, Set[RegisterVariableWrapper | MemoryRegion]], - mmmResults: MemoryModelMap, - memoryRegionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])] + cfg: ProgramCfg, + constPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + IRconstPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + memoryRegionResult: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]], + vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]], + interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]], + paramResults: Map[Procedure, Set[Variable]], + steensgaardResults: Map[RegisterWrapperPartialEquality, Set[RegisterWrapperPartialEquality | MemoryRegion]], + mmmResults: MemoryModelMap, + memoryRegionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], + reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])] ) /** Results of the main program execution. 
@@ -315,11 +315,11 @@ object IRTransform { } def resolveIndirectCallsUsingPointsTo( - cfg: ProgramCfg, - pointsTos: Map[RegisterVariableWrapper, Set[RegisterVariableWrapper | MemoryRegion]], - regionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], - IRProgram: Program + cfg: ProgramCfg, + pointsTos: Map[RegisterWrapperPartialEquality, Set[RegisterWrapperPartialEquality | MemoryRegion]], + regionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], + reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], + IRProgram: Program ): Boolean = { var modified: Boolean = false val worklist = ListBuffer[CfgNode]() @@ -374,11 +374,11 @@ object IRTransform { def resolveAddresses(variable: Variable, n: CfgNode): mutable.Set[String] = { val names = mutable.Set[String]() - val variableWrapper = RegisterVariableWrapper(variable, getUse(variable, n.asInstanceOf[CfgCommandNode].data, reachingDefs)) + val variableWrapper = RegisterWrapperPartialEquality(variable, getUse(variable, n.asInstanceOf[CfgCommandNode].data, reachingDefs)) pointsTos.get(variableWrapper) match { case Some(value) => value.map { - case v: RegisterVariableWrapper => names.addAll(resolveAddresses(v.variable, n)) + case v: RegisterWrapperPartialEquality => names.addAll(resolveAddresses(v.variable, n)) case m: MemoryRegion => names.addAll(searchRegion(m)) } names @@ -552,7 +552,7 @@ object StaticAnalysis { }) Logger.info("[!] Running Reaching Definitions Analysis") - val reachingDefinitionsAnalysisSolver = ReachingDefinitionsAnalysisSolver(IRProgram) + val reachingDefinitionsAnalysisSolver = InterprocReachingDefinitionsAnalysisSolver(IRProgram) val reachingDefinitionsAnalysisResults = reachingDefinitionsAnalysisSolver.analyze() println(s"Finished reaching definitions at ${(System.nanoTime() - before) / 1000000} ms") @@ -581,7 +581,7 @@ object StaticAnalysis { println(s"Finished ConstProp with SSA at ${(System.nanoTime() - before) / 1000000} ms") Logger.info("[!] Running MRA") - val mraSolver = MemoryRegionAnalysisSolver(IRProgram, globalAddresses, globalOffsets, mergedSubroutines, constPropResult, ANRResult, RNAResult, regionAccessesAnalysisResults, reachingDefinitionsAnalysisResults, maxDepth = 3) + val mraSolver = InterprocMemoryRegionAnalysisSolver(IRProgram, globalAddresses, globalOffsets, mergedSubroutines, constPropResultWithSSA, ANRResult, RNAResult, regionAccessesAnalysisResults, reachingDefinitionsAnalysisResults, maxDepth = 3) val mraResult = mraSolver.analyze() Logger.info("[!] Running MMM") @@ -590,7 +590,7 @@ object StaticAnalysis { mmm.logRegions() Logger.info("[!] Injecting regions") - val regionInjector = RegionInjector(domain, constPropResultWithSSA, mmm, reachingDefinitionsAnalysisResults, globalOffsets) + val regionInjector = RegionInjector(domain, IRProgram, constPropResultWithSSA, mmm, reachingDefinitionsAnalysisResults, globalOffsets) regionInjector.nodeVisitor() Logger.info("[!] 
Running Steensgaard") @@ -612,6 +612,11 @@ object StaticAnalysis { s"${s}_new_ir_constprop$iteration.dot" ) + writeToFile( + toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> constPropResultWithSSA(b).toString).toMap), + s"${s}_new_ir_constpropWithSSA$iteration.dot" + ) + writeToFile( toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> mraResult(b).toString).toMap), s"${s}_MRA$iteration.dot" @@ -623,17 +628,19 @@ object StaticAnalysis { // ValueSetAnalysisSolver(IRProgram, globalAddresses, externalAddresses, globalOffsets, subroutines, mmm, constPropResult) // val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = vsaSolver.analyze() - val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = Map() - val actualVSA = ActualVSA(IRProgram, constPropResult, reachingDefinitionsAnalysisResults, mmm) - val actualVSAResults: mutable.Map[CFGPosition, actualVSA.AbsEnv] = actualVSA.IntraProceduralVSA() - config.analysisDotPath.foreach(s => { - writeToFile( - toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> actualVSAResults.withDefaultValue(actualVSA.AbsEnv(mutable.Map(), mutable.Map(), mutable.Map())).get(b).toString).toMap), - s"${s}_ActualVSA$iteration.dot" - ) - }) + val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = Map() +// +// val actualVSA = ActualVSA(IRProgram, constPropResult, reachingDefinitionsAnalysisResults, mmm) +// val actualVSAResults: mutable.Map[CFGPosition, actualVSA.AbsEnv] = actualVSA.IntraProceduralVSA() +// +// config.analysisDotPath.foreach(s => { +// writeToFile( +// toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> actualVSAResults.withDefaultValue(actualVSA.AbsEnv()).get(b).toString).toMap), +// s"${s}_ActualVSA$iteration.dot" +// ) +// }) Logger.info("[!] 
Running Interprocedural Live Variables Analysis") //val interLiveVarsResults = InterLiveVarsAnalysis(IRProgram).analyze() From 5ec6d7ca7143b8f21815b01922814f57dcfd62cc Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Wed, 1 May 2024 14:36:05 +1000 Subject: [PATCH 031/104] Injecting memory regions into IR --- build.sbt | 2 + src/main/scala/analysis/RegionInjector.scala | 252 +++++++++++++++++++ src/main/scala/analysis/UtilMethods.scala | 23 +- src/main/scala/util/RunUtils.scala | 52 ++-- 4 files changed, 311 insertions(+), 18 deletions(-) create mode 100644 src/main/scala/analysis/RegionInjector.scala diff --git a/build.sbt b/build.sbt index ffe4b9bba..ca2432607 100644 --- a/build.sbt +++ b/build.sbt @@ -10,6 +10,7 @@ val scalactic = "org.scalactic" %% "scalactic" % "3.2.10" val antlrRuntime = "org.antlr" % "antlr4-runtime" % "4.9.3" val sourceCode = "com.lihaoyi" %% "sourcecode" % "0.3.0" val mainArgs = "com.lihaoyi" %% "mainargs" % "0.5.1" +val parralelCollections = "org.scala-lang.modules" %% "scala-parallel-collections" % "1.0.4" lazy val root = project .in(file(".")) @@ -26,6 +27,7 @@ lazy val root = project libraryDependencies += scalaTests, libraryDependencies += sourceCode, libraryDependencies += mainArgs, + libraryDependencies += parralelCollections, libraryDependencies += "org.scalameta" %% "munit" % "0.7.29" % Test ) diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala new file mode 100644 index 000000000..db7c95b1d --- /dev/null +++ b/src/main/scala/analysis/RegionInjector.scala @@ -0,0 +1,252 @@ +package analysis + +import ir.* +import util.Logger +import scala.collection.immutable +import scala.collection.mutable + +/** + * Replaces the region access with the calculated memory region. + */ +class RegionInjector(domain: mutable.Set[CFGPosition], + constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], + mmm: MemoryModelMap, + reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], + globalOffsets: Map[BigInt, BigInt]) { + private val stackPointer = Register("R31", BitVecType(64)) + + def nodeVisitor(): Unit = { + for (elem <- domain) {localTransfer(elem)} + } + + /** + * Used to reduce an expression that may be a sub-region of a memory region. + * Pointer reduction example: + * R2 = R31 + 20 + * Mem[R2 + 8] <- R1 + * + * Steps: + * 1) R2 = R31 + 20 <- ie. stack access (assume R31 = stackPointer) + * ↓ + * R2 = StackRegion("stack_1", 20) + * + * 2) Mem[R2 + 8] <- R1 <- ie. memStore + * ↓ + * (StackRegion("stack_1", 20) + 8) <- R1 + * ↓ + * MMM.get(20 + 8) <- R1 + * + * @param binExpr + * @param n + * @return Set[MemoryRegion]: a set of regions that the expression may be pointing to + */ + def reducibleToRegion(binExpr: BinaryExpr, n: Command): Set[MemoryRegion] = { + var reducedRegions = Set.empty[MemoryRegion] + binExpr.arg1 match { + case variable: Variable => + evaluateExpressionWithSSA(binExpr, constantProp(n), n, reachingDefs).foreach { b => + val region = mmm.findDataObject(b.value) + reducedRegions = reducedRegions ++ region + } + if (reducedRegions.nonEmpty) { + return reducedRegions + } + val ctx = getUse(variable, n, reachingDefs) + for (i <- ctx) { + if (i != n) { // handles loops (ie. 
R19 = R19 + 1) %00000662 in jumptable2 + val regions = i.rhs match { + case loadL: MemoryLoad => + val foundRegions = exprToRegion(loadL.index, i) + val toReturn = mutable.Set[MemoryRegion]().addAll(foundRegions) + for { + f <- foundRegions + } { + // TODO: Must enable this (probably need to calculate those contents beforehand) +// if (memoryRegionContents.contains(f)) { +// memoryRegionContents(f).foreach { +// case b: BitVecLiteral => +// // val region = mmm.findDataObject(b.value) +// // if (region.isDefined) { +// // toReturn.addOne(region.get) +// // } +// case r: MemoryRegion => +// toReturn.addOne(r) +// toReturn.remove(f) +// } +// } + } + toReturn.toSet + case _: BitVecLiteral => + Set.empty[MemoryRegion] + case _ => + println(s"Unknown expression: ${i}") + println(ctx) + exprToRegion(i.rhs, i) + } + val results = evaluateExpressionWithSSA(binExpr.arg2, constantProp(n), n, reachingDefs) + for { + b <- results + r <- regions + } { + r match { + case stackRegion: StackRegion => + val nextOffset = BinaryExpr(binExpr.op, stackRegion.start, b) + evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => + reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, b2), n) + } + case dataRegion: DataRegion => + val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start, globalOffsets), b) + evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => + reducedRegions ++= exprToRegion(b2, n) + } + case _ => + } + } + } + } + case _ => + } + reducedRegions + } + + /** + * Finds a region for a given expression using MMM results + * + * @param expr + * @param n + * @return Set[MemoryRegion]: a set of regions that the expression may be pointing to + */ + def exprToRegion(expr: Expr, n: Command): Set[MemoryRegion] = { + var res = Set[MemoryRegion]() + mmm.popContext() + mmm.pushContext(IRWalk.procedure(n).name) + expr match { // TODO: Stack detection here should be done in a better way or just merged with data + case binOp: BinaryExpr if binOp.arg1 == stackPointer => + evaluateExpressionWithSSA(binOp.arg2, constantProp(n), n, reachingDefs).foreach { b => + if binOp.arg2.variables.exists { v => v.sharedVariable } then { + Logger.debug("Shared stack object: " + b) + Logger.debug("Shared in: " + expr) + val regions = mmm.findSharedStackObject(b.value) + Logger.debug("found: " + regions) + res ++= regions + } else { + val region = mmm.findStackObject(b.value) + if (region.isDefined) { + res = res + region.get + } + } + } + res + case binaryExpr: BinaryExpr => + res ++= reducibleToRegion(binaryExpr, n) + res + case v: Variable if v == stackPointer => + res ++= mmm.findStackObject(0) + res + case v: Variable => + evaluateExpressionWithSSA(expr, constantProp(n), n, reachingDefs).foreach { b => + Logger.debug("BitVecLiteral: " + b) + val region = mmm.findDataObject(b.value) + if (region.isDefined) { + res += region.get + } + } + if (res.isEmpty) { // may be passed as param + val ctx = getUse(v, n, reachingDefs) + for (i <- ctx) { + i.rhs match { + case load: MemoryLoad => // treat as a region + res ++= exprToRegion(load.index, i) + case binaryExpr: BinaryExpr => + res ++= reducibleToRegion(binaryExpr, i) + res ++= exprToRegion(i.rhs, i) + case _ => // also treat as a region (for now) even if just Base + Offset without memLoad + res ++= exprToRegion(i.rhs, i) + } + } + } + res + case _ => + evaluateExpressionWithSSA(expr, constantProp(n), n, reachingDefs).foreach { b => + Logger.debug("BitVecLiteral: " + b) + val region = 
mmm.findDataObject(b.value) + if (region.isDefined) { + res += region.get + } + } + res + } + } + + /** Default implementation of eval. + */ + def eval(expr: Expr, cmd: Command): Expr = { + expr match + case literal: Literal => literal // ignore literals + case Extract(end, start, body) => + Extract(end, start, eval(body, cmd)) + case Repeat(repeats, body) => + Repeat(repeats, eval(body, cmd)) + case ZeroExtend(extension, body) => + ZeroExtend(extension, eval(body, cmd)) + case SignExtend(extension, body) => + SignExtend(extension, eval(body, cmd)) + case UnaryExpr(op, arg) => + UnaryExpr(op, eval(arg, cmd)) + case BinaryExpr(op, arg1, arg2) => + BinaryExpr(op, eval(arg1, cmd), eval(arg2, cmd)) + case MemoryStore(mem, index, value, endian, size) => + // TODO: index should be replaced region + val regions = exprToRegion(eval(index, cmd), cmd) + if (regions.size == 1) { + MemoryStore(Memory(regions.head.regionIdentifier, mem.addressSize, mem.valueSize), eval(index, cmd), eval(value, cmd), endian, size) + } else if (regions.size > 1) { + Logger.warn(s"MemStore is: ${cmd}") + Logger.warn(s"Multiple regions found for memory store: ${regions}") + expr + } else { + Logger.warn(s"No region found for memory store") + expr + } + case MemoryLoad(mem, index, endian, size) => + // TODO: index should be replaced region + val regions = exprToRegion(eval(index, cmd), cmd) + if (regions.size == 1) { + MemoryLoad(Memory(regions.head.regionIdentifier, mem.addressSize, mem.valueSize), eval(index, cmd), endian, size) + } else if (regions.size > 1) { + Logger.warn(s"MemLoad is: ${cmd}") + Logger.warn(s"Multiple regions found for memory load: ${regions}") + expr + } else { + Logger.warn(s"No region found for memory load") + expr + } + case Memory(name, addressSize, valueSize) => + expr // ignore memory + case variable: Variable => variable // ignore variables + } + + /** Transfer function for state lattice elements. 
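 * (Editor's sketch, hedged: judging from eval above, this pass re-targets a load or store whose
 * index resolves to exactly one region at a memory named after that region, e.g. something like
 * {{{
 *   MemoryLoad(mem, BinaryExpr(BVADD, R31, BitVecLiteral(16, 64)), endian, 64)
 *     // becomes, when exprToRegion yields a single stack region "stack_1":
 *   MemoryLoad(Memory("stack_1", mem.addressSize, mem.valueSize), BinaryExpr(BVADD, R31, BitVecLiteral(16, 64)), endian, 64)
 * }}}
 * where "stack_1" stands in for the region's regionIdentifier; ambiguous or unresolved indices are
 * left untouched and only logged.)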
+ */ + def localTransfer(n: CFGPosition): Unit = n match { + case cmd: Command => + cmd match + case statement: Statement => statement match + case assign: LocalAssign => + assign.rhs = eval(assign.rhs, cmd) + case mAssign: MemoryAssign => + mAssign.lhs = eval(mAssign.lhs, cmd).asInstanceOf[Memory] + mAssign.rhs = eval(mAssign.rhs, cmd).asInstanceOf[MemoryStore] + case nop: NOP => // ignore NOP + case assert: Assert => + assert.body = eval(assert.body, cmd) + case assume: Assume => + assume.body = eval(assume.body, cmd) + case jump: Jump => jump match + case to: GoTo => // ignore GoTo + case call: Call => call match + case call: DirectCall => // ignore DirectCall + case call: IndirectCall => // ignore IndirectCall + case _ => // ignore other kinds of nodes + } +} \ No newline at end of file diff --git a/src/main/scala/analysis/UtilMethods.scala b/src/main/scala/analysis/UtilMethods.scala index 2d6c54090..0b9a9c32e 100644 --- a/src/main/scala/analysis/UtilMethods.scala +++ b/src/main/scala/analysis/UtilMethods.scala @@ -133,10 +133,6 @@ def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapper val result = evaluateExpressionWithSSA(e.body, constantPropResult, n, reachingDefs) applySingle(BitVectorEval.boogie_extract(e.end, e.start, _: BitVecLiteral), result) case variable: Variable => - Logger.debug("Variable: " + variable) - Logger.debug("node: " + n) - Logger.debug("reachingDefs: " + reachingDefs(n)) - Logger.debug("getUse: " + getUse(variable, n, reachingDefs)) constantPropResult(RegisterWrapperEqualSets(variable, getUse(variable, n, reachingDefs))) case b: BitVecLiteral => Set(b) case _ => throw RuntimeException("ERROR: CASE NOT HANDLED: " + exp + "\n") @@ -153,6 +149,25 @@ def getUse(variable: Variable, node: CFGPosition, reachingDefs: Map[CFGPosition, out.getOrElse(variable, Set()) } +///** +// * In expressions that have accesses within a region, we need to relocate +// * the base address to the actual address using the relocation table. +// * MUST RELOCATE because MMM iterate to find the lowest address +// * TODO: May need to iterate over the relocation table to find the actual address +// * +// * @param address +// * @param globalOffsets +// * @return BitVecLiteral: the relocated address +// */ +//def relocatedBase(address: BitVecLiteral, globalOffsets: Map[BigInt, BigInt]): BitVecLiteral = { +// val tableAddress = globalOffsets.getOrElse(address.value, address.value) +// // this condition checks if the address is not layered and returns if it is not +// if (tableAddress != address.value && !globalOffsets.contains(tableAddress)) { +// return address +// } +// BitVecLiteral(tableAddress, address.size) +//} + def unwrapExpr(expr: Expr): Set[Expr] = { var buffers: Set[Expr] = Set() expr match { diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index d7dba1c42..56482704e 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -499,7 +499,7 @@ object IRTransform { regionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])] ): Unit = { - + // iterate over all commands - if call is to pthread_create, look up? 
for (p <- program.procedures) { for (b <- p.blocks) { @@ -601,6 +601,7 @@ object StaticAnalysis { config: StaticAnalysisConfig, iteration: Int ): StaticAnalysisContext = { + val before = System.nanoTime() val IRProgram: Program = ctx.program val externalFunctions: Set[ExternalFunction] = ctx.externalFunctions val globals: Set[SpecGlobal] = ctx.globals @@ -621,7 +622,6 @@ object StaticAnalysis { Logger.info("Subroutine Addresses:") Logger.info(subroutines) - // reducible loops val detector = LoopDetector(IRProgram) val foundLoops = detector.identify_loops() @@ -631,6 +631,8 @@ object StaticAnalysis { val newLoops = transformer.llvm_transform() newLoops.foreach(l => Logger.info(s"Loop found: ${l.name}")) + println(s"Finished Loop Transform at ${(System.nanoTime() - before)/1000000} ms") + config.analysisDotPath.foreach { s => val newCFG = ProgramCfgFactory().fromIR(IRProgram) writeToFile(newCFG.toDot(x => x.toString, Output.dotIder), s"${s}_resolvedCFG-reducible.dot") @@ -641,16 +643,22 @@ object StaticAnalysis { val cfg = ProgramCfgFactory().fromIR(IRProgram) + println(s"Finished CFG gen at ${(System.nanoTime() - before) / 1000000} ms") + val domain = computeDomain(IntraProcIRCursor, IRProgram.procedures) Logger.info("[!] Running ANR") val ANRSolver = ANRAnalysisSolver(IRProgram) val ANRResult = ANRSolver.analyze() + println(s"Finished ANR at ${(System.nanoTime() - before) / 1000000} ms") + Logger.info("[!] Running RNA") val RNASolver = RNAAnalysisSolver(IRProgram) val RNAResult = RNASolver.analyze() + println(s"Finished RNA at ${(System.nanoTime() - before) / 1000000} ms") + Logger.info("[!] Running Constant Propagation") val constPropSolver = ConstantPropagationSolver(IRProgram) val constPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] = constPropSolver.analyze() @@ -660,9 +668,14 @@ object StaticAnalysis { val specGlobalAddresses = ctx.specification.globals.map(s => s.address -> s.name).toMap var varDepsSummaries = VariableDependencyAnalysis(IRProgram, ctx.specification.globals, specGlobalAddresses, constPropResult, scc).analyze() + println(s"Finished ConstProp at ${(System.nanoTime() - before) / 1000000} ms") + + Logger.info("[!] Running IR Simple Value Analysis") val ilcpsolver = IRSimpleValueAnalysis.Solver(IRProgram) val newCPResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] = ilcpsolver.analyze() + println(s"Finished IR Simple Value Analysis at ${(System.nanoTime() - before) / 1000000} ms") + config.analysisResultsPath.foreach(s => writeToFile(printAnalysisResults(IRProgram, newCPResult), s"${s}_new_ir_constprop$iteration.txt") ) @@ -672,9 +685,12 @@ object StaticAnalysis { writeToFile(toDot(dumpdomain, InterProcIRCursor, Map.empty), s"${f}_new_ir_intercfg$iteration.dot") }) + Logger.info("[!] 
Running Reaching Definitions Analysis") val reachingDefinitionsAnalysisSolver = ReachingDefinitionsAnalysisSolver(IRProgram) val reachingDefinitionsAnalysisResults = reachingDefinitionsAnalysisSolver.analyze() + println(s"Finished reaching definitions at ${(System.nanoTime() - before) / 1000000} ms") + config.analysisDotPath.foreach(s => { writeToFile( toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> reachingDefinitionsAnalysisResults(b).toString).toMap), @@ -687,6 +703,8 @@ object StaticAnalysis { val regionAccessesAnalysisSolver = RegionAccessesAnalysisSolver(cfg, constPropResult, reachingDefinitionsAnalysisResults) val regionAccessesAnalysisResults = regionAccessesAnalysisSolver.analyze() + println(s"Finished region accesses at ${(System.nanoTime() - before) / 1000000} ms") + config.analysisDotPath.foreach(s => writeToFile(cfg.toDot(Output.labeler(regionAccessesAnalysisResults, true), Output.dotIder), s"${s}_RegTo$iteration.dot")) config.analysisResultsPath.foreach(s => writeToFile(printAnalysisResults(cfg, regionAccessesAnalysisResults, iteration), s"${s}_RegTo$iteration.txt")) @@ -694,10 +712,28 @@ object StaticAnalysis { val constPropSolverWithSSA = ConstantPropagationSolverWithSSA(IRProgram, reachingDefinitionsAnalysisResults) val constPropResultWithSSA = constPropSolverWithSSA.analyze() + println(s"Finished ConstProp with SSA at ${(System.nanoTime() - before) / 1000000} ms") + Logger.info("[!] Running MRA") val mraSolver = MemoryRegionAnalysisSolver(IRProgram, globalAddresses, globalOffsets, mergedSubroutines, constPropResult, ANRResult, RNAResult, regionAccessesAnalysisResults, reachingDefinitionsAnalysisResults) val mraResult = mraSolver.analyze() + Logger.info("[!] Running MMM") + val mmm = MemoryModelMap() + mmm.convertMemoryRegions(mraResult, mergedSubroutines, globalOffsets, mraSolver.procedureToSharedRegions) + mmm.logRegions() + + Logger.info("[!] Injecting regions") + val regionInjector = RegionInjector(domain, constPropResultWithSSA, mmm, reachingDefinitionsAnalysisResults, globalOffsets) + regionInjector.nodeVisitor() + + Logger.info("[!] Running Steensgaard") + val steensgaardSolver = InterprocSteensgaardAnalysis(IRProgram, constPropResultWithSSA, regionAccessesAnalysisResults, mmm, reachingDefinitionsAnalysisResults, globalOffsets) + steensgaardSolver.analyze() + val steensgaardResults = steensgaardSolver.pointsTo() + val memoryRegionContents = steensgaardSolver.getMemoryRegionContents + mmm.logRegions(memoryRegionContents) + config.analysisDotPath.foreach(s => { writeToFile(dotCallGraph(IRProgram), s"${s}_callgraph$iteration.dot") writeToFile( @@ -716,18 +752,6 @@ object StaticAnalysis { ) }) - Logger.info("[!] Running MMM") - val mmm = MemoryModelMap() - mmm.convertMemoryRegions(mraResult, mergedSubroutines, globalOffsets, mraSolver.procedureToSharedRegions) - mmm.logRegions() - - Logger.info("[!] Running Steensgaard") - val steensgaardSolver = InterprocSteensgaardAnalysis(IRProgram, constPropResultWithSSA, regionAccessesAnalysisResults, mmm, reachingDefinitionsAnalysisResults, globalOffsets) - steensgaardSolver.analyze() - val steensgaardResults = steensgaardSolver.pointsTo() - val memoryRegionContents = steensgaardSolver.getMemoryRegionContents - mmm.logRegions(memoryRegionContents) - Logger.info("[!] 
Running VSA") val vsaSolver = ValueSetAnalysisSolver(IRProgram, globalAddresses, externalAddresses, globalOffsets, subroutines, mmm, constPropResult) From cee69a2f2f6827e9104877e95dcd40a5488ceb26 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Fri, 17 May 2024 16:09:55 +1000 Subject: [PATCH 032/104] Loop handling in MRA (using depth for now) --- .../InterprocSteensgaardAnalysis.scala | 11 ++- .../scala/analysis/MemoryRegionAnalysis.scala | 87 +++++++++++-------- src/main/scala/analysis/RegionInjector.scala | 13 ++- .../analysis/solvers/FixPointSolver.scala | 3 +- src/main/scala/util/RunUtils.scala | 2 +- 5 files changed, 71 insertions(+), 45 deletions(-) diff --git a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala index 38153e277..93df47156 100644 --- a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala +++ b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala @@ -108,7 +108,8 @@ class InterprocSteensgaardAnalysis( var reducedRegions = Set.empty[MemoryRegion] binExpr.arg1 match { case variable: Variable => - evaluateExpressionWithSSA(binExpr, constantProp(n), n, reachingDefs).foreach { b => + val a = evaluateExpressionWithSSA(binExpr, constantProp(n), n, reachingDefs) + a.foreach { b => val region = mmm.findDataObject(b.value) reducedRegions = reducedRegions ++ region } @@ -153,9 +154,11 @@ class InterprocSteensgaardAnalysis( } { r match { case stackRegion: StackRegion => - val nextOffset = BinaryExpr(binExpr.op, stackRegion.start, b) - evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => - reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, b2), n) + if (b.size == stackRegion.start.size) { + val nextOffset = BinaryExpr(binExpr.op, stackRegion.start, b) + evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => + reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, b2), n) + } } case dataRegion: DataRegion => Logger.debug(s"Hey, I'm a data region: $dataRegion") diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index 65aa1cc20..d078af751 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -15,7 +15,8 @@ trait MemoryRegionAnalysis(val program: Program, val ANRResult: Map[CFGPosition, Set[Variable]], val RNAResult: Map[CFGPosition, Set[Variable]], val regionAccesses: Map[CfgNode, Map[RegisterVariableWrapper, FlatElement[Expr]]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]) { + reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], + val maxDepth: Int) { var mallocCount: Int = 0 private var stackCount: Int = 0 @@ -90,9 +91,19 @@ trait MemoryRegionAnalysis(val program: Program, // TODO: this could be used instead of regionAccesses in other analyses to reduce the Expr to region conversion private val registerToRegions: mutable.Map[RegisterVariableWrapper, mutable.Set[MemoryRegion]] = mutable.Map() val procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]] = mutable.Map() + var depthMap: mutable.Map[CFGPosition, Int] = mutable.Map() def reducibleToRegion(binExpr: BinaryExpr, n: Command): Set[MemoryRegion] = { var reducedRegions = Set.empty[MemoryRegion] + if (depthMap.contains(n)) { + if (depthMap(n) > maxDepth) { + depthMap += (n -> 0) + return reducedRegions + } + } else { 
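      // Editor's note (hedged): this counter is the "loop handling (using depth for now)" from the
      // commit message. reducibleToRegion recurses through eval on the defining assignments of the
      // base variable, so a cyclic definition such as R19 = R19 + 1 could otherwise recurse without
      // bound; after maxDepth visits of the same position (RunUtils passes maxDepth = 3) the query
      // resets its counter and bails out with the empty region set.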
+ depthMap += (n -> 0) + } + depthMap(n) += 1 binExpr.arg1 match { case variable: Variable => val ctx = getUse(variable, n, reachingDefs) @@ -103,6 +114,8 @@ trait MemoryRegionAnalysis(val program: Program, case _: BitVecLiteral => Set.empty case _ => + println(s"OG $n") + println(s"Unreducible: $i") eval(i.rhs, Set.empty, i) } evaluateExpression(binExpr.arg2, constantProp(n)) match { @@ -126,22 +139,24 @@ trait MemoryRegionAnalysis(val program: Program, } def eval(exp: Expr, env: Set[MemoryRegion], n: Command): Set[MemoryRegion] = { - Logger.debug(s"evaluating $exp") - Logger.debug(s"env: $env") - Logger.debug(s"n: $n") + println(s"Asked to evaluate: $exp at ${n.label}") exp match { case binOp: BinaryExpr => if (spList.contains(binOp.arg1)) { evaluateExpression(binOp.arg2, constantProp(n)) match { case Some(b: BitVecLiteral) => Set(poolMaster(b, IRWalk.procedure(n))) - case None => env + case None => throw RuntimeException(s"This should be reducible: $exp") } - } else if (reducibleToRegion(binOp, n).nonEmpty) { - reducibleToRegion(binOp, n) } else { - evaluateExpression(binOp, constantProp(n)) match { - case Some(b: BitVecLiteral) => eval(b, env, n) - case None => env + val reduced = reducibleToRegion(binOp, n) + if (reduced.nonEmpty) { + println(s"Reducible: exp $exp") + reduced + } else { + evaluateExpression(binOp, constantProp(n)) match { + case Some(b: BitVecLiteral) => eval(b, env, n) + case None => eval(binOp.arg1, env, n) ++ eval(binOp.arg2, env, n) + } } } case variable: Variable => @@ -149,7 +164,7 @@ trait MemoryRegionAnalysis(val program: Program, case _: LocalVar => env case reg: Register if spList.contains(reg) => - eval(BitVecLiteral(0, 64), env, n) + Set(poolMaster(BitVecLiteral(0, 64), IRWalk.procedure(n))) case _ => evaluateExpression(variable, constantProp(n)) match { case Some(b: BitVecLiteral) => @@ -162,11 +177,23 @@ trait MemoryRegionAnalysis(val program: Program, eval(memoryLoad.index, env, n) // ignore case where it could be a global region (loaded later in MMM from relf) case b: BitVecLiteral => - Set(poolMaster(b, IRWalk.procedure(n))) - // we cannot evaluate this to a concrete value, we need VSA for this - case _ => - Logger.debug(s"type: ${exp.getClass} $exp\n") - throw new Exception("Unknown type") + env + case literal: Literal => // ignore literals other than BitVectors + env + case extract: Extract => + eval(extract.body, env, n) + case repeat: Repeat => + eval(repeat.body, env, n) + case zeroExtend: ZeroExtend => + eval(zeroExtend.body, env, n) + case signExtend: SignExtend => + eval(signExtend.body, env, n) + case unaryExpr: UnaryExpr => + eval(unaryExpr.arg, env, n) + case memoryStore: MemoryAssign => + eval(memoryStore.index, env, n) ++ eval(memoryStore.value, env, n) + case memory: Memory => + env } } @@ -174,6 +201,7 @@ trait MemoryRegionAnalysis(val program: Program, */ def localTransfer(n: CFGPosition, s: Set[MemoryRegion]): Set[MemoryRegion] = n match { case cmd: Command => + println(s"N: $n") cmd match { case directCall: DirectCall => val ANR = ANRResult(cmd) @@ -202,22 +230,12 @@ trait MemoryRegionAnalysis(val program: Program, s } case memAssign: MemoryAssign => - if (ignoreRegions.contains(memAssign.value)) { - s - } else { - val result = eval(memAssign.index, s, cmd) - regionLattice.lub(s, result) - } - case assign: Assign => - stackDetection(assign) - var m = s - unwrapExpr(assign.rhs).foreach { - case memoryLoad: MemoryLoad => - val result = eval(memoryLoad.index, s, cmd) - m = regionLattice.lub(m, result) - case _ => m - } - m + val result 
= eval(memAssign.index, s, cmd) + regionLattice.lub(s, result) + case localAssign: Assign => + stackDetection(localAssign) + val result = eval(localAssign.rhs, s, cmd) + regionLattice.lub(s, result) case _ => s } case _ => s // ignore other kinds of nodes @@ -235,8 +253,9 @@ class MemoryRegionAnalysisSolver( ANRResult: Map[CFGPosition, Set[Variable]], RNAResult: Map[CFGPosition, Set[Variable]], regionAccesses: Map[CfgNode, Map[RegisterVariableWrapper, FlatElement[Expr]]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])] - ) extends MemoryRegionAnalysis(program, globals, globalOffsets, subroutines, constantProp, ANRResult, RNAResult, regionAccesses, reachingDefs) + reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], + maxDepth: Int + ) extends MemoryRegionAnalysis(program, globals, globalOffsets, subroutines, constantProp, ANRResult, RNAResult, regionAccesses, reachingDefs, maxDepth) with IRIntraproceduralForwardDependencies with Analysis[Map[CFGPosition, LiftedElement[Set[MemoryRegion]]]] with WorklistFixpointSolverWithReachability[CFGPosition, Set[MemoryRegion], PowersetLattice[MemoryRegion]] { diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala index db7c95b1d..63358aeab 100644 --- a/src/main/scala/analysis/RegionInjector.scala +++ b/src/main/scala/analysis/RegionInjector.scala @@ -90,9 +90,13 @@ class RegionInjector(domain: mutable.Set[CFGPosition], } { r match { case stackRegion: StackRegion => - val nextOffset = BinaryExpr(binExpr.op, stackRegion.start, b) - evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => - reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, b2), n) + println(s"StackRegion: ${stackRegion.start}") + println(s"BitVecLiteral: ${b}") + if (b.size == stackRegion.start.size) { + val nextOffset = BinaryExpr(binExpr.op, stackRegion.start, b) + evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => + reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, b2), n) + } } case dataRegion: DataRegion => val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start, globalOffsets), b) @@ -159,7 +163,6 @@ class RegionInjector(domain: mutable.Set[CFGPosition], res ++= exprToRegion(load.index, i) case binaryExpr: BinaryExpr => res ++= reducibleToRegion(binaryExpr, i) - res ++= exprToRegion(i.rhs, i) case _ => // also treat as a region (for now) even if just Base + Offset without memLoad res ++= exprToRegion(i.rhs, i) } @@ -205,6 +208,7 @@ class RegionInjector(domain: mutable.Set[CFGPosition], Logger.warn(s"Multiple regions found for memory store: ${regions}") expr } else { + Logger.warn(s"MemStore is: ${cmd}") Logger.warn(s"No region found for memory store") expr } @@ -218,6 +222,7 @@ class RegionInjector(domain: mutable.Set[CFGPosition], Logger.warn(s"Multiple regions found for memory load: ${regions}") expr } else { + Logger.warn(s"MemLoad is: ${cmd}") Logger.warn(s"No region found for memory load") expr } diff --git a/src/main/scala/analysis/solvers/FixPointSolver.scala b/src/main/scala/analysis/solvers/FixPointSolver.scala index bae5f8b9d..29186a9eb 100644 --- a/src/main/scala/analysis/solvers/FixPointSolver.scala +++ b/src/main/scala/analysis/solvers/FixPointSolver.scala @@ -249,9 +249,8 @@ trait PushDownWorklistFixpointSolver[N, T, L <: Lattice[T]] extends MapLatticeSo def process(n: N): Unit = val xn = x(n) val y = transfer(n, xn) + // 
TODO: Only propagate if there's a change for succ <- outdep(n) do propagate(y, succ) - - /** Worklist-based fixpoint solver. * * @tparam N diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 56482704e..8d658d4ec 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -715,7 +715,7 @@ object StaticAnalysis { println(s"Finished ConstProp with SSA at ${(System.nanoTime() - before) / 1000000} ms") Logger.info("[!] Running MRA") - val mraSolver = MemoryRegionAnalysisSolver(IRProgram, globalAddresses, globalOffsets, mergedSubroutines, constPropResult, ANRResult, RNAResult, regionAccessesAnalysisResults, reachingDefinitionsAnalysisResults) + val mraSolver = MemoryRegionAnalysisSolver(IRProgram, globalAddresses, globalOffsets, mergedSubroutines, constPropResult, ANRResult, RNAResult, regionAccessesAnalysisResults, reachingDefinitionsAnalysisResults, maxDepth = 3) val mraResult = mraSolver.analyze() Logger.info("[!] Running MMM") From aee3d46e87dafff2c2e975f639e82cac034c72be Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Mon, 17 Jun 2024 11:14:18 +1000 Subject: [PATCH 033/104] Initial Works on VSA --- src/main/scala/analysis/ActualVSA.scala | 311 ++++++++++++++++++ src/main/scala/analysis/BitVectorEval.scala | 20 +- .../scala/analysis/IrreducibleLoops.scala | 2 +- .../analysis/LoopConditionEvaluator.scala | 13 + src/main/scala/analysis/MemoryModelMap.scala | 3 + src/main/scala/analysis/UtilMethods.scala | 3 - src/main/scala/util/RunUtils.scala | 29 +- 7 files changed, 370 insertions(+), 11 deletions(-) create mode 100644 src/main/scala/analysis/ActualVSA.scala create mode 100644 src/main/scala/analysis/LoopConditionEvaluator.scala diff --git a/src/main/scala/analysis/ActualVSA.scala b/src/main/scala/analysis/ActualVSA.scala new file mode 100644 index 000000000..b4ec94071 --- /dev/null +++ b/src/main/scala/analysis/ActualVSA.scala @@ -0,0 +1,311 @@ +package analysis +import ir._ +import util._ +import scala.collection.mutable +import analysis.BitVectorEval._ + +class ActualVSA(program: Program, + constantPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], + mmm: MemoryModelMap) { + + enum Flag { + case CF // Carry Flag + case ZF // Zero Flag + case SF // Sign Flag + case PF // Parity Flag + case AF // Auxiliary Flag + case OF // Overflow Flag + } + + enum Bool3 { + case True + case False + case Maybe + } + + case class StridedInterval(s: BitVecLiteral, lb: BitVecLiteral, ub: BitVecLiteral) { + require(smt_bvule(lb, ub) == TrueLiteral, "Lower bound must be less than or equal to upper bound") + + // Meaning of a strided interval + def gamma: Set[BitVecLiteral] = { + smt_interval(lb, ub, s) + } + + override def toString: String = { + s"$s[$lb, $ub]" + } + + // Addition + def +(that: StridedInterval): StridedInterval = { + val newLb = smt_bvadd(this.lb, that.lb) + val newUb = smt_bvadd(this.ub, that.ub) + val newS = gcd(this.s, that.s) + StridedInterval(newS, newLb, newUb) + } + + // Bitwise NOT + def unary_~ : StridedInterval = { + StridedInterval(s, smt_bvnot(ub), smt_bvnot(lb)) + } + + // Bitwise AND + def &(that: StridedInterval): StridedInterval = { + val lbAnd = smt_bvand(this.lb, that.lb) + val ubAnd = smt_bvand(this.ub, that.ub) + StridedInterval(gcd(this.s, that.s), lbAnd, ubAnd) + } + + // join of two or more strided intervals + def join(that: StridedInterval): StridedInterval = { + val newLb = 
smt_min(this.lb, that.lb) + val newUb = smt_max(this.ub, that.ub) + val newS = gcd(this.s, that.s) + StridedInterval(newS, newLb, newUb) + } + + // Helper function to compute the greatest common divisor + private def gcd(a: BitVecLiteral, b: BitVecLiteral): BitVecLiteral = { + if (b.value == 0) a else gcd(b, smt_bvsmod(a, b)) + } + } + + + /** + * ValueSet class that represents a set of values. + * s is the stride + * l is the lower bound + * u is the upper bound + * [l, u] is the interval + * [l, u] \ s is the set of values + * 0[l,l] represents the singleton set {l} + */ + case class ValueSet(intervals: Set[StridedInterval]) { + + def gamma: Set[BitVecLiteral] = { + intervals.flatMap(_.gamma) + } + + // Union of two value sets + def union(that: ValueSet): ValueSet = { + ValueSet(this.intervals ++ that.intervals) + } + + // Intersection of two value sets + def intersect(that: ValueSet): ValueSet = { + val newIntervals = for { + a <- this.intervals + b <- that.intervals + inter = intersectIntervals(a, b) if inter.isDefined + } yield inter.get + ValueSet(newIntervals) + } + + // Intersection of two strided intervals + private def intersectIntervals(a: StridedInterval, b: StridedInterval): Option[StridedInterval] = { + val newLb = smt_max(a.lb, b.lb) + val newUb = smt_min(a.ub, b.ub) + val newS = smt_gcd(a.s, b.s) + if (smt_bvule(newLb, newUb) == TrueLiteral) Some(StridedInterval(newS, newLb, newUb)) else None + } + + // Addition of value sets + def +(that: ValueSet): ValueSet = { + val newIntervals = for { + a <- this.intervals + b <- that.intervals + } yield a + b + ValueSet(newIntervals) + } + + // Addition of a constant to a value set + def +(c: BitVecLiteral): ValueSet = { + val newIntervals = for { + a <- this.intervals + } yield StridedInterval(a.s, smt_bvadd(a.lb, c), smt_bvadd(a.ub, c)) // TODO: Should Stride change? + ValueSet(newIntervals) + } + } + + // top element of the lattice + private object ValueSetLattice { + val TOP: ValueSet = ValueSet(Set(StridedInterval(BitVecLiteral(BigInt(1), 64), BitVecLiteral(BigInt(0), 64), BitVecLiteral(BigInt(Long.MaxValue), 64)))) + val BOTTOM: ValueSet = ValueSet(Set()) + } + + + case class AlocEnv(R: MemoryRegion) + //private type AbsEnv = mutable.Map[Variable | MemoryRegion, ValueSet] | mutable.Map[MemoryRegion, AlocEnv] | mutable.Map[Flag, Bool3] + //private type AbsEnv = mutable.Map[Variable | MemoryRegion | Flag, ValueSet | AlocEnv | Bool3] + case class AbsEnv( + env1: mutable.Map[Variable | MemoryRegion, ValueSet], + env2: mutable.Map[MemoryRegion, AlocEnv], + env3: mutable.Map[Flag, Bool3] + ): + def join(that: AbsEnv): AbsEnv = { + AbsEnv( + env1 ++ that.env1, + env2 ++ that.env2, + env3 ++ that.env3 + ) + } + + /** + * ∗(vs, s): Returns a pair of sets (F, P). F represents the set of “fully accessed” a-locs: it + * consists of the a-locs that are of size s and whose starting addresses are in vs. P represents + * the set of “partially accessed” a-locs: it consists of (i) a-locs whose starting addresses are in + * vs but are not of size s, and (ii) a-locs whose addresses are in vs but whose starting addresses + * and sizes do not meet the conditions to be in F. [Reference VSA paper] + * + * @param vsR2 + * @param s + * @return + */ + private def dereference(vsR2: ValueSet, s: Int): (Set[MemoryRegion], Set[MemoryRegion]) = { + // TODO: size of dereference s is ignored (maybe it can be used to check overflows?) 
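+    // e.g. (intended semantics documented above): if vs = {-16} and the stack has an 8-byte a-loc
+    // starting at -16, an access of size 8 lands exactly on it, so F = {that a-loc} and P = {};
+    // if vs = {-12}, the same a-loc is touched but not from its start, so F = {} and P = {that a-loc}.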
+ // TODO: Global memory size can be retrieved from the symbol table and are of size s + // Map addresses to exact memory locations + val fullyAccessedLocations = vsR2.gamma.flatMap(address => mmm.findStackObject(address.value)) + + // Identify partially accessed locations (if any) + val partiallyAccessedLocations = vsR2.gamma.flatMap(address => mmm.findStackPartialAccessesOnly(address.value)) + + // Return the set of fully accessed locations and the set of partially accessed locations + (fullyAccessedLocations.diff(partiallyAccessedLocations).asInstanceOf[Set[MemoryRegion]], partiallyAccessedLocations.asInstanceOf[Set[MemoryRegion]]) + } + + private def RemoveLowerBounds(vs: ValueSet): ValueSet = { + val newIntervals = for { + a <- vs.intervals + } yield StridedInterval(a.s, BitVecLiteral(BigInt(0), a.ub.size), a.ub) + ValueSet(newIntervals) + } + + private def RemoveUpperBounds(vs: ValueSet): ValueSet = { + val newIntervals = for { + a <- vs.intervals + } yield StridedInterval(a.s, a.lb, BitVecLiteral(BigInt(Long.MaxValue), a.lb.size)) + ValueSet(newIntervals) + } + + private def joinValueSets(vs1: ValueSet, vs2: ValueSet): ValueSet = { + vs1.union(vs2) + } + + private def meetValueSets(vs1: ValueSet, vs2: ValueSet): ValueSet = { + vs1.intersect(vs2) + } + + def AbstractTransformer(in: AbsEnv, instruction: CFGPosition): AbsEnv = { + instruction match { + case p: Procedure => in + case b: Block => in + case c: Command => + c match + case statement: Statement => + statement match + case localAssign: LocalAssign => + localAssign.rhs match + case binOp: BinaryExpr => + if (binOp.arg1.isInstanceOf[Variable]) { + val R1 = localAssign.lhs + val R2 = binOp.arg1.asInstanceOf[Variable] + val c = evaluateExpression(binOp.arg2, constantPropResult(instruction)) + + // R1 = R2 + c + val out = in + val vs_R2: ValueSet = in.env1.getOrElseUpdate(R2, ValueSetLattice.BOTTOM) + out.env1(R1) = vs_R2 + c.get + out + } else { + in + } + case memoryLoad: MemoryLoad => + memoryLoad.index match + case binOp: BinaryExpr => + if (binOp.arg2.isInstanceOf[Variable]) { + val R1 = localAssign.lhs + val R2 = binOp.arg1.asInstanceOf[Variable] // TODO: Is R2 always a variable? 
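+                          // Transfer rule for R1 = *(R2 + c1) + c2 (per the VSA paper): dereference VS(R2) + c1;
+                          // if every touched a-loc is fully accessed, VS(R1) becomes the join of their value sets
+                          // shifted by c2; if any a-loc is only partially accessed, VS(R1) is widened to TOP.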
+ val out = in + getDefinition(binOp.arg2.asInstanceOf[Variable], instruction, reachingDefs).foreach { + case d: LocalAssign => + d.rhs match + case binOp2: BinaryExpr => + val c1 = evaluateExpression(binOp2.arg1, constantPropResult(instruction)) + val c2 = evaluateExpression(binOp2.arg2, constantPropResult(instruction)) + // R1 = *(R2 + c1) + c2 + val vs_R2: ValueSet = in.env1(R2) + val s = c2.get.size // TODO: s is the size of dereference performed by the instruction (I assume it is the same size as c2) + val (f: Set[MemoryRegion], p: Set[MemoryRegion]) = dereference(vs_R2 + c1.get, s) + if (p.isEmpty) { + val vs_rhs = f.map(in.env1(_)).reduce(joinValueSets) + out.env1(R1) = vs_rhs + c2.get + } else { + out.env1(R1) = ValueSetLattice.TOP + } + case _ => out + } + out + } else { + in + } + case _ => in // TODO: Handle other cases + case variable: Variable => + val R1 = localAssign.lhs + val R2 = variable + // R1 >= R2 + val out = in + val vs_R1 = in.env1.getOrElseUpdate(R1, ValueSetLattice.BOTTOM) + val vs_R2 = in.env1(R2) + val vs_lb = RemoveUpperBounds(vs_R2) + val vs_ub = RemoveLowerBounds(vs_R1) + out.env1(R1) = vs_R1.intersect(vs_lb) + out.env1(R2) = vs_R2.intersect(vs_ub) + out + case bitVecLiteral: BitVecLiteral => + val R1 = localAssign.lhs + val c = bitVecLiteral + // R1 <= c + val vs_c = ValueSet(Set(StridedInterval(smt_gcd(BitVecLiteral(BigInt(0), c.size), c), BitVecLiteral(BigInt(0), c.size), c))) // TODO: Fix ME + val out = in + out.env1(R1) = meetValueSets(in.env1(R1), vs_c) + out + case _ => in // TODO: Handle other cases + case memoryAssign: MemoryAssign => in // TODO: *(R1 + c1) = R2 + c2 + case nop: NOP => in + case assert: Assert => in + case assume: Assume => in + case jump: Jump => in + } + } + + def IntraProceduralVSA(): mutable.Map[CFGPosition, AbsEnv] = { + val worklist = new mutable.Queue[CFGPosition]() + worklist.enqueue(program.mainProcedure) + val absEnv_enter = AbsEnv(mutable.Map().withDefault(_ => ValueSetLattice.BOTTOM), mutable.Map(), mutable.Map()) + val abstractStates = mutable.Map[CFGPosition, AbsEnv](worklist.head -> absEnv_enter) + while(worklist.nonEmpty) { + val n: CFGPosition = worklist.dequeue() + val m = IntraProcIRCursor.succ(n) + for (succ <- m) { + val edge_amc = AbstractTransformer(abstractStates(n), succ) + Propagate(succ, edge_amc) + } + } + + def Propagate(n: CFGPosition, edge_amc: AbsEnv): Unit = { + if (!abstractStates.contains(n)) { + abstractStates(n) = edge_amc + worklist.enqueue(n) + } else { + val oldEnv = abstractStates(n) + val newEnv = oldEnv.join(edge_amc) + if (newEnv != oldEnv) { + abstractStates(n) = newEnv + worklist.enqueue(n) + } + } + } + abstractStates + } +} diff --git a/src/main/scala/analysis/BitVectorEval.scala b/src/main/scala/analysis/BitVectorEval.scala index 0b5847506..a3da4de13 100644 --- a/src/main/scala/analysis/BitVectorEval.scala +++ b/src/main/scala/analysis/BitVectorEval.scala @@ -1,7 +1,8 @@ package analysis -import ir._ +import ir.* import analysis.BitVectorEval.* +import scala.annotation.tailrec import scala.math.pow object BitVectorEval { @@ -328,4 +329,21 @@ object BitVectorEval { } } + def smt_min(s: BitVecLiteral, t: BitVecLiteral): BitVecLiteral = { + if (smt_bvslt(s, t) == TrueLiteral) s else t + } + + def smt_max(s: BitVecLiteral, t: BitVecLiteral): BitVecLiteral = { + if (smt_bvslt(s, t) == TrueLiteral) t else s + } + + @tailrec + def smt_gcd(a: BitVecLiteral, b: BitVecLiteral): BitVecLiteral = { + if (b.value == 0) a else smt_gcd(b, smt_bvsmod(a, b)) + } + + def smt_interval(lb: BitVecLiteral, 
ub: BitVecLiteral, step: BitVecLiteral): Set[BitVecLiteral] = { + require(smt_bvule(lb, ub) == TrueLiteral, "Lower bound must be less than or equal to upper bound") + (lb.value to ub.value by step.value).map(BitVecLiteral(_, lb.size)).toSet + } } diff --git a/src/main/scala/analysis/IrreducibleLoops.scala b/src/main/scala/analysis/IrreducibleLoops.scala index f3d3bb44e..7486cbf44 100644 --- a/src/main/scala/analysis/IrreducibleLoops.scala +++ b/src/main/scala/analysis/IrreducibleLoops.scala @@ -24,7 +24,7 @@ private def label(p: CFGPosition) = { * */ case class LoopEdge(from: CFGPosition, to: CFGPosition) { - override def toString: String = s"(${label(from)}, ${label(to)})" + override def toString: String = s"(${from}, ${to})" } /* A loop is a subgraph of a CFG diff --git a/src/main/scala/analysis/LoopConditionEvaluator.scala b/src/main/scala/analysis/LoopConditionEvaluator.scala new file mode 100644 index 000000000..2659c6ecd --- /dev/null +++ b/src/main/scala/analysis/LoopConditionEvaluator.scala @@ -0,0 +1,13 @@ +//package analysis +//import ir.* +//import util.* +// +//class LoopConditionEvaluator(context: Map[CFGPosition, Map[Variable, Set[BitVecLiteral]]], reachingDefs: Map[CFGPosition, Map[Variable, Set[LocalAssign]]]) { +// def evaluate(loop: Loop): Set[BitVecLiteral] = { +// val loopCondition = loop.condition +// val loopHeader = loop.header +// val loopHeaderContext = context(loopHeader) +// val loopConditionResult = evaluateExpressionWithSSA(loopCondition, loopHeaderContext, loopHeader, reachingDefs) +// loopConditionResult +// } +//} diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index d91340d5b..1636fd085 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -201,6 +201,9 @@ class MemoryModelMap { } } + def findStackPartialAccessesOnly(value: BigInt): Option[StackRegion] = { + stackMap.find((range, _) => range.start < value && value <= range.end).map((range, obj) => obj) + } def findStackObject(value: BigInt): Option[StackRegion] = stackMap.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => obj) diff --git a/src/main/scala/analysis/UtilMethods.scala b/src/main/scala/analysis/UtilMethods.scala index 0b9a9c32e..665acbe8e 100644 --- a/src/main/scala/analysis/UtilMethods.scala +++ b/src/main/scala/analysis/UtilMethods.scala @@ -12,7 +12,6 @@ import util.Logger * The evaluated expression (e.g. 
0x69632) */ def evaluateExpression(exp: Expr, constantPropResult: Map[Variable, FlatElement[BitVecLiteral]]): Option[BitVecLiteral] = { - Logger.debug(s"evaluateExpression: $exp") exp match { case binOp: BinaryExpr => val lhs = evaluateExpression(binOp.arg1, constantPropResult) @@ -73,8 +72,6 @@ def evaluateExpression(exp: Expr, constantPropResult: Map[Variable, FlatElement[ } def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapperEqualSets, Set[BitVecLiteral]], n: CFGPosition, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]): Set[BitVecLiteral] = { - Logger.debug(s"evaluateExpression: $exp") - def apply(op: (BitVecLiteral, BitVecLiteral) => BitVecLiteral, a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = { val res = for { x <- a diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 8d658d4ec..1c3db4599 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -625,11 +625,11 @@ object StaticAnalysis { // reducible loops val detector = LoopDetector(IRProgram) val foundLoops = detector.identify_loops() - foundLoops.foreach(l => Logger.info(s"Loop found: ${l.name}")) + foundLoops.foreach(l => Logger.info(s"Loop found: ${l}")) val transformer = LoopTransform(foundLoops) val newLoops = transformer.llvm_transform() - newLoops.foreach(l => Logger.info(s"Loop found: ${l.name}")) + newLoops.foreach(l => Logger.info(s"Loop found: ${l}")) println(s"Finished Loop Transform at ${(System.nanoTime() - before)/1000000} ms") @@ -647,6 +647,11 @@ object StaticAnalysis { val domain = computeDomain(IntraProcIRCursor, IRProgram.procedures) + config.analysisDotPath.foreach { s => + writeToFile(cfg.toDot(x => x.toString, Output.dotIder), s"${s}_preCFG_${iteration}.dot") + writeToFile(printAnalysisResults(IRProgram, Map.empty), s"${s}_preCFG_$iteration.txt") + } + Logger.info("[!] Running ANR") val ANRSolver = ANRAnalysisSolver(IRProgram) val ANRResult = ANRSolver.analyze() @@ -752,10 +757,22 @@ object StaticAnalysis { ) }) - Logger.info("[!] Running VSA") - val vsaSolver = - ValueSetAnalysisSolver(IRProgram, globalAddresses, externalAddresses, globalOffsets, subroutines, mmm, constPropResult) - val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = vsaSolver.analyze() +// Logger.info("[!] Running VSA") +// val vsaSolver = +// ValueSetAnalysisSolver(IRProgram, globalAddresses, externalAddresses, globalOffsets, subroutines, mmm, constPropResult) +// val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = vsaSolver.analyze() + + val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = Map() + + val actualVSA = ActualVSA(IRProgram, constPropResult, reachingDefinitionsAnalysisResults, mmm) + val actualVSAResults: mutable.Map[CFGPosition, actualVSA.AbsEnv] = actualVSA.IntraProceduralVSA() + + config.analysisDotPath.foreach(s => { + writeToFile( + toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> actualVSAResults.withDefaultValue(actualVSA.AbsEnv(mutable.Map(), mutable.Map(), mutable.Map())).get(b).toString).toMap), + s"${s}_ActualVSA$iteration.dot" + ) + }) Logger.info("[!] 
Running Interprocedural Live Variables Analysis") //val interLiveVarsResults = InterLiveVarsAnalysis(IRProgram).analyze() From 12df143551ee5f8b588441f92e5fb0e2097eb069 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Wed, 26 Jun 2024 12:29:42 +1000 Subject: [PATCH 034/104] Changes to partial region accesses --- src/main/scala/analysis/ActualVSA.scala | 83 +++++++++++++------- src/main/scala/analysis/Lattice.scala | 17 ++++ src/main/scala/analysis/MemoryModelMap.scala | 70 ++++++++++++++++- 3 files changed, 139 insertions(+), 31 deletions(-) diff --git a/src/main/scala/analysis/ActualVSA.scala b/src/main/scala/analysis/ActualVSA.scala index b4ec94071..9954f04a3 100644 --- a/src/main/scala/analysis/ActualVSA.scala +++ b/src/main/scala/analysis/ActualVSA.scala @@ -24,6 +24,15 @@ class ActualVSA(program: Program, case Maybe } + /** + * SI class that represents a strided interval + * s is the stride + * l is the lower bound + * u is the upper bound + * [l, u] is the interval + * [l, u] \ s is the set of values + * 0[l,l] represents the singleton set {l} + */ case class StridedInterval(s: BitVecLiteral, lb: BitVecLiteral, ub: BitVecLiteral) { require(smt_bvule(lb, ub) == TrueLiteral, "Lower bound must be less than or equal to upper bound") @@ -55,6 +64,14 @@ class ActualVSA(program: Program, val ubAnd = smt_bvand(this.ub, that.ub) StridedInterval(gcd(this.s, that.s), lbAnd, ubAnd) } + + // Intersection of two strided intervals + def intersect(that: StridedInterval): Option[StridedInterval] = { + val newLb = smt_max(this.lb, that.lb) + val newUb = smt_min(this.ub, that.ub) + val newS = smt_gcd(this.s, that.s) + if (smt_bvule(newLb, newUb) == TrueLiteral) Some(StridedInterval(newS, newLb, newUb)) else None + } // join of two or more strided intervals def join(that: StridedInterval): StridedInterval = { @@ -70,15 +87,9 @@ class ActualVSA(program: Program, } } - /** - * ValueSet class that represents a set of values. 
- * s is the stride - * l is the lower bound - * u is the upper bound - * [l, u] is the interval - * [l, u] \ s is the set of values - * 0[l,l] represents the singleton set {l} + * A single value set is a map from regions to strided intervals + * @param intervals */ case class ValueSet(intervals: Set[StridedInterval]) { @@ -96,19 +107,11 @@ class ActualVSA(program: Program, val newIntervals = for { a <- this.intervals b <- that.intervals - inter = intersectIntervals(a, b) if inter.isDefined + inter = a.intersect(b) if inter.isDefined } yield inter.get ValueSet(newIntervals) } - // Intersection of two strided intervals - private def intersectIntervals(a: StridedInterval, b: StridedInterval): Option[StridedInterval] = { - val newLb = smt_max(a.lb, b.lb) - val newUb = smt_min(a.ub, b.ub) - val newS = smt_gcd(a.s, b.s) - if (smt_bvule(newLb, newUb) == TrueLiteral) Some(StridedInterval(newS, newLb, newUb)) else None - } - // Addition of value sets def +(that: ValueSet): ValueSet = { val newIntervals = for { @@ -130,15 +133,19 @@ class ActualVSA(program: Program, // top element of the lattice private object ValueSetLattice { val TOP: ValueSet = ValueSet(Set(StridedInterval(BitVecLiteral(BigInt(1), 64), BitVecLiteral(BigInt(0), 64), BitVecLiteral(BigInt(Long.MaxValue), 64)))) - val BOTTOM: ValueSet = ValueSet(Set()) + val BOTTOM: ValueSet = ValueSet(mmm.getAllRegions.map(r => Set())) // TODO: should be all regions mapped to empty set } - case class AlocEnv(R: MemoryRegion) + case class AlocEnv(allocs: Set[MemoryRegion]) { + def join(that: AlocEnv): AlocEnv = { + AlocEnv(this.allocs ++ that.allocs) + } + } //private type AbsEnv = mutable.Map[Variable | MemoryRegion, ValueSet] | mutable.Map[MemoryRegion, AlocEnv] | mutable.Map[Flag, Bool3] //private type AbsEnv = mutable.Map[Variable | MemoryRegion | Flag, ValueSet | AlocEnv | Bool3] case class AbsEnv( - env1: mutable.Map[Variable | MemoryRegion, ValueSet], + env1: mutable.Map[Variable, ValueSet], env2: mutable.Map[MemoryRegion, AlocEnv], env3: mutable.Map[Flag, Bool3] ): @@ -161,14 +168,13 @@ class ActualVSA(program: Program, * @param s * @return */ - private def dereference(vsR2: ValueSet, s: Int): (Set[MemoryRegion], Set[MemoryRegion]) = { - // TODO: size of dereference s is ignored (maybe it can be used to check overflows?) + private def dereference(vsR2: ValueSet, s: BigInt): (Set[MemoryRegion], Set[MemoryRegion]) = { // TODO: Global memory size can be retrieved from the symbol table and are of size s // Map addresses to exact memory locations - val fullyAccessedLocations = vsR2.gamma.flatMap(address => mmm.findStackObject(address.value)) + val fullyAccessedLocations = vsR2.gamma.flatMap(address => mmm.findStackFullAccessesOnly(address.value, s)) // Identify partially accessed locations (if any) - val partiallyAccessedLocations = vsR2.gamma.flatMap(address => mmm.findStackPartialAccessesOnly(address.value)) + val partiallyAccessedLocations = vsR2.gamma.flatMap(address => mmm.findStackPartialAccessesOnly(address.value, s)) // Return the set of fully accessed locations and the set of partially accessed locations (fullyAccessedLocations.diff(partiallyAccessedLocations).asInstanceOf[Set[MemoryRegion]], partiallyAccessedLocations.asInstanceOf[Set[MemoryRegion]]) @@ -228,22 +234,24 @@ class ActualVSA(program: Program, val R2 = binOp.arg1.asInstanceOf[Variable] // TODO: Is R2 always a variable? 
val out = in getDefinition(binOp.arg2.asInstanceOf[Variable], instruction, reachingDefs).foreach { - case d: LocalAssign => + d => d.rhs match case binOp2: BinaryExpr => val c1 = evaluateExpression(binOp2.arg1, constantPropResult(instruction)) val c2 = evaluateExpression(binOp2.arg2, constantPropResult(instruction)) // R1 = *(R2 + c1) + c2 val vs_R2: ValueSet = in.env1(R2) - val s = c2.get.size // TODO: s is the size of dereference performed by the instruction (I assume it is the same size as c2) - val (f: Set[MemoryRegion], p: Set[MemoryRegion]) = dereference(vs_R2 + c1.get, s) + val s = memoryLoad.size // s is the size of dereference performed by the instruction + val (f: Set[MemoryRegion], p: Set[MemoryRegion]) = dereference(vs_R2 + c1.get, BigInt(s)) + println("VSA") + println(f) if (p.isEmpty) { val vs_rhs = f.map(in.env1(_)).reduce(joinValueSets) out.env1(R1) = vs_rhs + c2.get } else { out.env1(R1) = ValueSetLattice.TOP } - case _ => out + case _ => } out } else { @@ -282,12 +290,29 @@ class ActualVSA(program: Program, def IntraProceduralVSA(): mutable.Map[CFGPosition, AbsEnv] = { val worklist = new mutable.Queue[CFGPosition]() worklist.enqueue(program.mainProcedure) - val absEnv_enter = AbsEnv(mutable.Map().withDefault(_ => ValueSetLattice.BOTTOM), mutable.Map(), mutable.Map()) + val allStackRegions: Set[StackRegion] = mmm.getAllStackRegions() + val allDataRegions: Set[DataRegion] = mmm.getAllDataRegions() + val allHeapRegions: Set[HeapRegion] = mmm.getAllHeapRegions() + + val allocatedStackRegions = AlocEnv(allStackRegions) + val allocatedDataRegions = AlocEnv(allDataRegions) + val allocatedHeapRegions = AlocEnv(allHeapRegions) + + val stackManyToOne = allStackRegions.map(r => r -> allocatedStackRegions).toMap + val dataManyToOne = allDataRegions.map(r => r -> allocatedDataRegions).toMap + val heapManyToOne = allHeapRegions.map(r => r -> allocatedHeapRegions).toMap + + val combinedMap = stackManyToOne ++ dataManyToOne ++ heapManyToOne + val flagsToMaybe = Flag.values.map(f => f -> Bool3.Maybe).toMap + + val absEnv_enter = AbsEnv(mutable.Map().withDefault(_ => ValueSetLattice.BOTTOM), mutable.Map() ++ combinedMap, mutable.Map() ++ flagsToMaybe) val abstractStates = mutable.Map[CFGPosition, AbsEnv](worklist.head -> absEnv_enter) while(worklist.nonEmpty) { val n: CFGPosition = worklist.dequeue() val m = IntraProcIRCursor.succ(n) for (succ <- m) { + mmm.popContext() + mmm.pushContext(IRWalk.procedure(n).name) val edge_amc = AbstractTransformer(abstractStates(n), succ) Propagate(succ, edge_amc) } diff --git a/src/main/scala/analysis/Lattice.scala b/src/main/scala/analysis/Lattice.scala index 0ef98020f..5c3ccd630 100644 --- a/src/main/scala/analysis/Lattice.scala +++ b/src/main/scala/analysis/Lattice.scala @@ -140,6 +140,23 @@ class TupleLattice[L1 <: Lattice[T1], L2 <: Lattice[T2], T1, T2](val lattice1: L override def top: (T1, T2) = (lattice1.top, lattice2.top) } +//trait StridedIntervalLattice[T] extends Lattice[(T, T, T)] { +// override val bottom: (T, T, T) = (???, ???, ???) +// +// override def lub(x: (T1, T2), y: (T1, T2)): (T1, T2) = { +// val (x1, x2) = x +// val (y1, y2) = y +// (lattice1.lub(x1, y1), lattice2.lub(x2, y2)) +// } +// +// override def leq(x: (T1, T2), y: (T1, T2)): Boolean = { +// val (x1, x2) = x +// val (y1, y2) = y +// lattice1.leq(x1, y1) && lattice2.leq(x2, y2) +// } +// +// override def top: (T1, T2) = (lattice1.top, lattice2.top) +//} /** A lattice of maps from a set of elements of type `A` to a lattice with element `L'. Bottom is the default value. 
*/ diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index 1636fd085..fc719d945 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -201,8 +201,74 @@ class MemoryModelMap { } } - def findStackPartialAccessesOnly(value: BigInt): Option[StackRegion] = { - stackMap.find((range, _) => range.start < value && value <= range.end).map((range, obj) => obj) + /* All regions that either: + * 1. starts at value but size less than region size + * 2. starts at value but size more than region size (add both regions ie. next region) + * 3. starts between regions (start, end) and (value + size) => end + * 4. starts between regions (start, end) and (value + size) < end (add both regions ie. next region) + */ + def findStackPartialAccessesOnly(value: BigInt, size: BigInt): Set[StackRegion] = { + val matchingRegions = scala.collection.mutable.Set[StackRegion]() + + stackMap.foreach { case (range, region) => + // Condition 1: Starts at value but size less than region size + if (range.start == value && range.size > size) { + matchingRegions += region + } + // Condition 2: Starts at value but size more than region size (add subsequent regions) + else if (range.start == value && range.size < size) { + matchingRegions += region + var remainingSize = size - range.size + var nextStart = range.end + stackMap.toSeq.sortBy(_._1.start).dropWhile(_._1.start <= range.start).foreach { case (nextRange, nextRegion) => + if (remainingSize > 0) { + matchingRegions += nextRegion + remainingSize -= nextRange.size + nextStart = nextRange.end + } + } + } + // Condition 3: Starts between regions (start, end) and (value + size) => end + else if (range.start < value && (value + size) <= range.end) { + matchingRegions += region + } + // Condition 4: Starts between regions (start, end) and (value + size) < end (add subsequent regions) + else if (range.start < value && (value + size) > range.end) { + matchingRegions += region + var remainingSize = (value + size) - range.end + var nextStart = range.end + stackMap.toSeq.sortBy(_._1.start).dropWhile(_._1.start <= range.start).foreach { case (nextRange, nextRegion) => + if (remainingSize > 0) { + matchingRegions += nextRegion + remainingSize -= nextRange.size + nextStart = nextRange.end + } + } + } + } + + matchingRegions.toSet + } + + def getAllStackRegions: Set[StackRegion] = { + localStacks.values.toSet.flatten + } + + def getAllDataRegions: Set[DataRegion] = { + dataMap.values.toSet + } + + def getAllHeapRegions: Set[HeapRegion] = { + heapMap.values.toSet + } + + def getAllRegions: Set[MemoryRegion] = { + (getAllStackRegions ++ getAllDataRegions ++ getAllHeapRegions) + } + + /* All regions that start at value and are exactly of length size */ + def findStackFullAccessesOnly(value: BigInt, size: BigInt): Option[StackRegion] = { + stackMap.find((range, _) => range.start == value && range.size == size).map((range, obj) => obj) } def findStackObject(value: BigInt): Option[StackRegion] = From 5c0619e2574b1997a2cbab3d9feb9b31e7bd4efc Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Tue, 13 Aug 2024 15:03:25 +1000 Subject: [PATCH 035/104] Adding regions into IR --- src/main/scala/analysis/MemoryModelMap.scala | 176 +++++++++++++++++-- src/main/scala/analysis/RegionInjector.scala | 27 ++- 2 files changed, 189 insertions(+), 14 deletions(-) diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index fc719d945..eb14b983c 100644 --- 
a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -30,6 +30,8 @@ class MemoryModelMap { private val heapMap: mutable.Map[RangeKey, HeapRegion] = mutable.TreeMap() private val dataMap: mutable.Map[RangeKey, DataRegion] = mutable.TreeMap() + private val uf = new UnionFind() + /** Add a range and object to the mapping * * @param offset the offset of the range @@ -66,6 +68,15 @@ class MemoryModelMap { currentDataMap.addOne(updatedRange -> currentMaxRegion) currentDataMap(RangeKey(offset, MAX_BIGINT)) = d } + case h: HeapRegion => + val currentHeapMap = heapMap + if (currentHeapMap.isEmpty) { + currentHeapMap(RangeKey(offset, offset + h.size.value - 1)) = h + } else { + val currentMaxRange = currentHeapMap.keys.maxBy(_.end) + val currentMaxRegion = currentHeapMap(currentMaxRange) + currentHeapMap(RangeKey(currentMaxRange.start + 1, h.size.value - 1)) = h + } } } @@ -164,6 +175,22 @@ class MemoryModelMap { for (dataRgn <- allDataRgns) { add(dataRgn.start.value, dataRgn) } + + // add heap regions + val rangeStart = 0 + for ((position, regions) <- memoryRegions) { + regions match { + case Lift(node) => + for (region <- node) { + region match { + case heapRegion: HeapRegion => + add(BigInt(0), heapRegion) + case _ => + } + } + case LiftedBottom => + } + } } // TODO: push and pop could be optimised by caching the results def pushContext(funName: String): Unit = { @@ -247,38 +274,93 @@ class MemoryModelMap { } } - matchingRegions.toSet + matchingRegions.toSet.map(returnRegion) + } + + def getRegionsWithSize(size: BigInt, function: String, negateCondition: Boolean = false): Set[MemoryRegion] = { + val matchingRegions = scala.collection.mutable.Set[MemoryRegion]() + + pushContext(function) + stackMap.foreach { + case (range, region) => + if (negateCondition) { + if (range.size != size) { + matchingRegions += region + } + } else if (range.size == size) { + matchingRegions += region + } + } + popContext() + + heapMap.foreach { case (range, region) => + if (negateCondition) { + if (range.size != size) { + matchingRegions += region + } + } else if (range.size == size) { + matchingRegions += region + } + } + + dataMap.foreach { case (range, region) => + if (negateCondition) { + if (range.size != size) { + matchingRegions += region + } + } else if (range.size == size) { + matchingRegions += region + } + } + + matchingRegions.toSet.map(returnRegion) + } + + def getAllocsPerProcedure: Map[String, Set[StackRegion]] = { + localStacks.map((name, stackRegions) => (name, stackRegions.toSet.map(returnRegion))).toMap } def getAllStackRegions: Set[StackRegion] = { - localStacks.values.toSet.flatten + localStacks.values.toSet.flatten.map(returnRegion) } - + def getAllDataRegions: Set[DataRegion] = { - dataMap.values.toSet + dataMap.values.toSet.map(returnRegion) } - + def getAllHeapRegions: Set[HeapRegion] = { - heapMap.values.toSet + heapMap.values.toSet.map(returnRegion) } - + def getAllRegions: Set[MemoryRegion] = { - (getAllStackRegions ++ getAllDataRegions ++ getAllHeapRegions) + getAllStackRegions ++ getAllDataRegions ++ getAllHeapRegions + } + + def getEnd(memoryRegion: MemoryRegion): BigInt = { // TODO: This would return a list of ends + val range = memoryRegion match { + case stackRegion: StackRegion => + stackMap.find((_, obj) => obj == stackRegion).map((range, _) => range).getOrElse(RangeKey(0, 0)) + case heapRegion: HeapRegion => + heapMap.find((_, obj) => obj == heapRegion).map((range, _) => range).getOrElse(RangeKey(0, 0)) + case dataRegion: DataRegion => 
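+        // As with the stack and heap cases above, a region missing from the map defaults to RangeKey(0, 0), i.e. end 0.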
+ dataMap.find((_, obj) => obj == dataRegion).map((range, _) => range).getOrElse(RangeKey(0, 0)) + } + range.end } /* All regions that start at value and are exactly of length size */ def findStackFullAccessesOnly(value: BigInt, size: BigInt): Option[StackRegion] = { - stackMap.find((range, _) => range.start == value && range.size == size).map((range, obj) => obj) + stackMap.find((range, _) => range.start == value && range.size == size).map((range, obj) => returnRegion(obj)) } def findStackObject(value: BigInt): Option[StackRegion] = - stackMap.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => obj) + stackMap.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => returnRegion(obj)) def findSharedStackObject(value: BigInt): Set[StackRegion] = - sharedStackMap.values.flatMap(_.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => obj)).toSet + sharedStackMap.values.flatMap(_.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => returnRegion(obj))).toSet def findDataObject(value: BigInt): Option[DataRegion] = - dataMap.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => obj) + dataMap.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => returnRegion(obj)) override def toString: String = s"Stack: $stackMap\n Heap: $heapMap\n Data: $dataMap\n" @@ -323,6 +405,29 @@ class MemoryModelMap { logRegion(range, region) } } + + def mergeRegions(regions: Set[MemoryRegion]): MemoryRegion = { + // assert regions are of the same type + regions.foreach(uf.makeSet) + regions.foreach(uf.union(regions.head, _)) + uf.find(regions.head) + } + + private def returnRegion(region: MemoryRegion): MemoryRegion = { + uf.find(region) + } + + private def returnRegion(region: StackRegion): StackRegion = { + uf.find(region.asInstanceOf[MemoryRegion]).asInstanceOf[StackRegion] + } + + private def returnRegion(region: DataRegion): DataRegion = { + uf.find(region.asInstanceOf[MemoryRegion]).asInstanceOf[DataRegion] + } + + private def returnRegion(region: HeapRegion): HeapRegion = { + uf.find(region.asInstanceOf[MemoryRegion]).asInstanceOf[HeapRegion] + } } trait MemoryRegion { @@ -340,3 +445,50 @@ case class HeapRegion(override val regionIdentifier: String, size: BitVecLiteral case class DataRegion(override val regionIdentifier: String, start: BitVecLiteral) extends MemoryRegion { override def toString: String = s"Data($regionIdentifier, $start)" } + +class UnionFind { + // Map to store the parent of each region + private val parent: mutable.Map[MemoryRegion, MemoryRegion] = mutable.Map() + + // Map to store the size of each set, used for union by rank + private val size: mutable.Map[MemoryRegion, Int] = mutable.Map() + + // Initialise each region to be its own parent and set size to 1 + def makeSet(region: MemoryRegion): Unit = { + parent(region) = region + size(region) = 1 + } + + // Find operation with path compression + def find(region: MemoryRegion): MemoryRegion = { + if (!parent.contains(region)) { + makeSet(region) + } + + if (parent(region) != region) { + parent(region) = find(parent(region)) // Path compression + } + parent(region) + } + + // Union operation with union by rank + def union(region1: MemoryRegion, region2: MemoryRegion): Unit = { + val root1 = find(region1) + val root2 = find(region2) + + if (root1 != root2) { + if (size(root1) < size(root2)) { + parent(root1) = root2 + size(root2) += size(root1) + } else { + parent(root2) = 
root1 + size(root1) += size(root2) + } + } + } + + // Check if two regions are in the same set + def connected(region1: MemoryRegion, region2: MemoryRegion): Boolean = { + find(region1) == find(region2) + } +} \ No newline at end of file diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala index 63358aeab..fc8633f9b 100644 --- a/src/main/scala/analysis/RegionInjector.scala +++ b/src/main/scala/analysis/RegionInjector.scala @@ -4,11 +4,13 @@ import ir.* import util.Logger import scala.collection.immutable import scala.collection.mutable +import scala.collection.mutable.ArrayBuffer /** * Replaces the region access with the calculated memory region. */ class RegionInjector(domain: mutable.Set[CFGPosition], + program: Program, constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], mmm: MemoryModelMap, reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], @@ -17,6 +19,8 @@ class RegionInjector(domain: mutable.Set[CFGPosition], def nodeVisitor(): Unit = { for (elem <- domain) {localTransfer(elem)} + program.initialMemory = transformMemorySections(program.initialMemory) + program.readOnlyMemory = transformMemorySections(program.readOnlyMemory) } /** @@ -206,7 +210,7 @@ class RegionInjector(domain: mutable.Set[CFGPosition], } else if (regions.size > 1) { Logger.warn(s"MemStore is: ${cmd}") Logger.warn(s"Multiple regions found for memory store: ${regions}") - expr + MemoryStore(Memory(mmm.mergeRegions(regions).regionIdentifier, mem.addressSize, mem.valueSize), eval(index, cmd), eval(value, cmd), endian, size) } else { Logger.warn(s"MemStore is: ${cmd}") Logger.warn(s"No region found for memory store") @@ -220,7 +224,7 @@ class RegionInjector(domain: mutable.Set[CFGPosition], } else if (regions.size > 1) { Logger.warn(s"MemLoad is: ${cmd}") Logger.warn(s"Multiple regions found for memory load: ${regions}") - expr + MemoryLoad(Memory(mmm.mergeRegions(regions).regionIdentifier, mem.addressSize, mem.valueSize), eval(index, cmd), endian, size) } else { Logger.warn(s"MemLoad is: ${cmd}") Logger.warn(s"No region found for memory load") @@ -254,4 +258,23 @@ class RegionInjector(domain: mutable.Set[CFGPosition], case call: IndirectCall => // ignore IndirectCall case _ => // ignore other kinds of nodes } + + def transformMemorySections(memorySegment: ArrayBuffer[MemorySection]): ArrayBuffer[MemorySection] = { + val newArrayBuffer = ArrayBuffer.empty[MemorySection] + for (elem <- memorySegment) { + elem match { + case mem: MemorySection => + val regions = mmm.findDataObject(mem.address) + if (regions.size == 1) { + newArrayBuffer += MemorySection(regions.head.regionIdentifier, mem.address, mem.size, mem.bytes) + Logger.warn(s"Region ${regions.get.regionIdentifier} found for memory section ${mem.address}") + } else { + newArrayBuffer += mem + Logger.warn(s"No region found for memory section ${mem.address}") + } + case _ => + } + } + newArrayBuffer + } } \ No newline at end of file From f623faa9c010387e26ea309bddc2ea0cdfc2298e Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Thu, 22 Aug 2024 16:24:59 +1000 Subject: [PATCH 036/104] Initial VSA work --- src/main/scala/analysis/AVLTree.scala | 179 ++++ src/main/scala/analysis/ActualVSA.scala | 752 +++++++++------- src/main/scala/analysis/ActualVSAold.scala | 60 ++ src/main/scala/analysis/Analysis.scala | 6 +- src/main/scala/analysis/BACKUPLAttice | 834 ++++++++++++++++++ src/main/scala/analysis/BitVectorEval.scala | 18 +- 
.../InterprocSteensgaardAnalysis.scala | 40 +- src/main/scala/analysis/LAST_VSA_BACKUP.scala | 276 ++++++ src/main/scala/analysis/Lattice.scala | 791 ++++++++++++++++- .../scala/analysis/MemoryRegionAnalysis.scala | 169 ++-- .../ReachingDefinitionsAnalysis.scala | 16 +- .../scala/analysis/RegToMemAnalysis.scala | 20 +- src/main/scala/analysis/UtilMethods.scala | 37 +- .../analysis/solvers/AbstractSPAnalysis.scala | 95 ++ src/main/scala/util/RunUtils.scala | 61 +- 15 files changed, 2878 insertions(+), 476 deletions(-) create mode 100644 src/main/scala/analysis/AVLTree.scala create mode 100644 src/main/scala/analysis/ActualVSAold.scala create mode 100644 src/main/scala/analysis/BACKUPLAttice create mode 100644 src/main/scala/analysis/LAST_VSA_BACKUP.scala create mode 100644 src/main/scala/analysis/solvers/AbstractSPAnalysis.scala diff --git a/src/main/scala/analysis/AVLTree.scala b/src/main/scala/analysis/AVLTree.scala new file mode 100644 index 000000000..ef65475a0 --- /dev/null +++ b/src/main/scala/analysis/AVLTree.scala @@ -0,0 +1,179 @@ +package analysis + +/** + * Node of the AVL tree. + * @param key + * @param value + * @param height + * @param left + * @param right + * @tparam K key type + * @tparam V value type + */ +case class Node[K, V](var key: K, var value: V, var height: Int, var left: Option[Node[K, V]], var right: Option[Node[K, V]]) + +/** + * AVL tree implementation. Ref. https://cs.indstate.edu/~kbalaraman/anew.pdf + * @param ordering + * @tparam K key type + * @tparam V value type + */ +class AVLTree[K, V](ordering: Ordering[K]) { + private var root: Option[Node[K, V]] = None + + // Get the height of the node + private def height(node: Option[Node[K, V]]): Int = node.map(_.height).getOrElse(0) + + // Rotate right + private def rotateRight(y: Node[K, V]): Node[K, V] = { + val x = y.left.get + val T2 = x.right + x.right = Some(y) + y.left = T2 + y.height = Math.max(height(y.left), height(y.right)) + 1 + x.height = Math.max(height(x.left), height(x.right)) + 1 + x + } + + // Rotate left + private def rotateLeft(x: Node[K, V]): Node[K, V] = { + val y = x.right.get + val T2 = y.left + y.left = Some(x) + x.right = T2 + x.height = Math.max(height(x.left), height(x.right)) + 1 + y.height = Math.max(height(y.left), height(y.right)) + 1 + y + } + + // Get balance factor of node N + private def getBalance(node: Option[Node[K, V]]): Int = node.map(n => height(n.left) - height(n.right)).getOrElse(0) + + // Insert a key-value pair + def insert(key: K, value: V): Unit = { + def insertNode(node: Option[Node[K, V]], key: K, value: V): Node[K, V] = { + if (node.isEmpty) return Node(key, value, 1, None, None) + + val n = node.get + + if (ordering.lt(key, n.key)) n.left = Some(insertNode(n.left, key, value)) + else if (ordering.gt(key, n.key)) n.right = Some(insertNode(n.right, key, value)) + else { + n.value = value + return n + } + + n.height = 1 + Math.max(height(n.left), height(n.right)) + val balance = getBalance(Some(n)) + + // Left Left Case + if (balance > 1 && ordering.lt(key, n.left.get.key)) return rotateRight(n) + + // Right Right Case + if (balance < -1 && ordering.gt(key, n.right.get.key)) return rotateLeft(n) + + // Left Right Case + if (balance > 1 && ordering.gt(key, n.left.get.key)) { + n.left = Some(rotateLeft(n.left.get)) + return rotateRight(n) + } + + // Right Left Case + if (balance < -1 && ordering.lt(key, n.right.get.key)) { + n.right = Some(rotateRight(n.right.get)) + return rotateLeft(n) + } + + n + } + + root = Some(insertNode(root, key, value)) + } + + // Search 
for a value by key + def search(key: K): Option[V] = { + def searchNode(node: Option[Node[K, V]], key: K): Option[V] = { + if (node.isEmpty) return None + + val n = node.get + + if (ordering.equiv(key, n.key)) Some(n.value) + else if (ordering.lt(key, n.key)) searchNode(n.left, key) + else searchNode(n.right, key) + } + + searchNode(root, key) + } + + // Delete a key-value pair + def delete(key: K): Unit = { + def minValueNode(node: Node[K, V]): Node[K, V] = { + var current = node + while (current.left.isDefined) current = current.left.get + current + } + + def deleteNode(node: Option[Node[K, V]], key: K): Option[Node[K, V]] = { + if (node.isEmpty) return None + + val n = node.get + + if (ordering.lt(key, n.key)) n.left = deleteNode(n.left, key) + else if (ordering.gt(key, n.key)) n.right = deleteNode(n.right, key) + else { + if (n.left.isEmpty || n.right.isEmpty) { + val temp = if (n.left.isDefined) n.left else n.right + if (temp.isEmpty) return None + else return temp + } else { + val temp = minValueNode(n.right.get) + n.key = temp.key + n.value = temp.value + n.right = deleteNode(n.right, temp.key) + } + } + + n.height = Math.max(height(n.left), height(n.right)) + 1 + val balance = getBalance(Some(n)) + + // Left Left Case + if (balance > 1 && getBalance(n.left) >= 0) return Some(rotateRight(n)) + + // Left Right Case + if (balance > 1 && getBalance(n.left) < 0) { + n.left = Some(rotateLeft(n.left.get)) + return Some(rotateRight(n)) + } + + // Right Right Case + if (balance < -1 && getBalance(n.right) <= 0) return Some(rotateLeft(n)) + + // Right Left Case + if (balance < -1 && getBalance(n.right) > 0) { + n.right = Some(rotateRight(n.right.get)) + return Some(rotateLeft(n)) + } + + Some(n) + } + + root = deleteNode(root, key) + } +} + +// Example usage +object AVLTreeExample extends App { + val avl = new AVLTree[Int, String](Ordering.Int) + avl.insert(10, "Value10") + avl.insert(20, "Value20") + avl.insert(30, "Value30") + avl.insert(40, "Value40") + avl.insert(50, "Value50") + avl.insert(25, "Value25") + + println(avl.search(25)) // Some(Value25) + println(avl.search(100)) // None + + avl.delete(25) + println(avl.search(25)) // None +} diff --git a/src/main/scala/analysis/ActualVSA.scala b/src/main/scala/analysis/ActualVSA.scala index 9954f04a3..2a7b2dac4 100644 --- a/src/main/scala/analysis/ActualVSA.scala +++ b/src/main/scala/analysis/ActualVSA.scala @@ -1,336 +1,416 @@ -package analysis -import ir._ -import util._ -import scala.collection.mutable -import analysis.BitVectorEval._ - -class ActualVSA(program: Program, - constantPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], - mmm: MemoryModelMap) { - - enum Flag { - case CF // Carry Flag - case ZF // Zero Flag - case SF // Sign Flag - case PF // Parity Flag - case AF // Auxiliary Flag - case OF // Overflow Flag - } - - enum Bool3 { - case True - case False - case Maybe - } - - /** - * SI class that represents a strided interval - * s is the stride - * l is the lower bound - * u is the upper bound - * [l, u] is the interval - * [l, u] \ s is the set of values - * 0[l,l] represents the singleton set {l} - */ - case class StridedInterval(s: BitVecLiteral, lb: BitVecLiteral, ub: BitVecLiteral) { - require(smt_bvule(lb, ub) == TrueLiteral, "Lower bound must be less than or equal to upper bound") - - // Meaning of a strided interval - def gamma: Set[BitVecLiteral] = { - smt_interval(lb, ub, s) - } - - override def 
toString: String = { - s"$s[$lb, $ub]" - } - - // Addition - def +(that: StridedInterval): StridedInterval = { - val newLb = smt_bvadd(this.lb, that.lb) - val newUb = smt_bvadd(this.ub, that.ub) - val newS = gcd(this.s, that.s) - StridedInterval(newS, newLb, newUb) - } - - // Bitwise NOT - def unary_~ : StridedInterval = { - StridedInterval(s, smt_bvnot(ub), smt_bvnot(lb)) - } - - // Bitwise AND - def &(that: StridedInterval): StridedInterval = { - val lbAnd = smt_bvand(this.lb, that.lb) - val ubAnd = smt_bvand(this.ub, that.ub) - StridedInterval(gcd(this.s, that.s), lbAnd, ubAnd) - } - - // Intersection of two strided intervals - def intersect(that: StridedInterval): Option[StridedInterval] = { - val newLb = smt_max(this.lb, that.lb) - val newUb = smt_min(this.ub, that.ub) - val newS = smt_gcd(this.s, that.s) - if (smt_bvule(newLb, newUb) == TrueLiteral) Some(StridedInterval(newS, newLb, newUb)) else None - } - - // join of two or more strided intervals - def join(that: StridedInterval): StridedInterval = { - val newLb = smt_min(this.lb, that.lb) - val newUb = smt_max(this.ub, that.ub) - val newS = gcd(this.s, that.s) - StridedInterval(newS, newLb, newUb) - } - - // Helper function to compute the greatest common divisor - private def gcd(a: BitVecLiteral, b: BitVecLiteral): BitVecLiteral = { - if (b.value == 0) a else gcd(b, smt_bvsmod(a, b)) - } - } - - /** - * A single value set is a map from regions to strided intervals - * @param intervals - */ - case class ValueSet(intervals: Set[StridedInterval]) { - - def gamma: Set[BitVecLiteral] = { - intervals.flatMap(_.gamma) - } - - // Union of two value sets - def union(that: ValueSet): ValueSet = { - ValueSet(this.intervals ++ that.intervals) - } - - // Intersection of two value sets - def intersect(that: ValueSet): ValueSet = { - val newIntervals = for { - a <- this.intervals - b <- that.intervals - inter = a.intersect(b) if inter.isDefined - } yield inter.get - ValueSet(newIntervals) - } - - // Addition of value sets - def +(that: ValueSet): ValueSet = { - val newIntervals = for { - a <- this.intervals - b <- that.intervals - } yield a + b - ValueSet(newIntervals) - } - - // Addition of a constant to a value set - def +(c: BitVecLiteral): ValueSet = { - val newIntervals = for { - a <- this.intervals - } yield StridedInterval(a.s, smt_bvadd(a.lb, c), smt_bvadd(a.ub, c)) // TODO: Should Stride change? - ValueSet(newIntervals) - } - } - - // top element of the lattice - private object ValueSetLattice { - val TOP: ValueSet = ValueSet(Set(StridedInterval(BitVecLiteral(BigInt(1), 64), BitVecLiteral(BigInt(0), 64), BitVecLiteral(BigInt(Long.MaxValue), 64)))) - val BOTTOM: ValueSet = ValueSet(mmm.getAllRegions.map(r => Set())) // TODO: should be all regions mapped to empty set - } - - - case class AlocEnv(allocs: Set[MemoryRegion]) { - def join(that: AlocEnv): AlocEnv = { - AlocEnv(this.allocs ++ that.allocs) - } - } - //private type AbsEnv = mutable.Map[Variable | MemoryRegion, ValueSet] | mutable.Map[MemoryRegion, AlocEnv] | mutable.Map[Flag, Bool3] - //private type AbsEnv = mutable.Map[Variable | MemoryRegion | Flag, ValueSet | AlocEnv | Bool3] - case class AbsEnv( - env1: mutable.Map[Variable, ValueSet], - env2: mutable.Map[MemoryRegion, AlocEnv], - env3: mutable.Map[Flag, Bool3] - ): - def join(that: AbsEnv): AbsEnv = { - AbsEnv( - env1 ++ that.env1, - env2 ++ that.env2, - env3 ++ that.env3 - ) - } - - /** - * ∗(vs, s): Returns a pair of sets (F, P). 
F represents the set of “fully accessed” a-locs: it - * consists of the a-locs that are of size s and whose starting addresses are in vs. P represents - * the set of “partially accessed” a-locs: it consists of (i) a-locs whose starting addresses are in - * vs but are not of size s, and (ii) a-locs whose addresses are in vs but whose starting addresses - * and sizes do not meet the conditions to be in F. [Reference VSA paper] - * - * @param vsR2 - * @param s - * @return - */ - private def dereference(vsR2: ValueSet, s: BigInt): (Set[MemoryRegion], Set[MemoryRegion]) = { - // TODO: Global memory size can be retrieved from the symbol table and are of size s - // Map addresses to exact memory locations - val fullyAccessedLocations = vsR2.gamma.flatMap(address => mmm.findStackFullAccessesOnly(address.value, s)) - - // Identify partially accessed locations (if any) - val partiallyAccessedLocations = vsR2.gamma.flatMap(address => mmm.findStackPartialAccessesOnly(address.value, s)) - - // Return the set of fully accessed locations and the set of partially accessed locations - (fullyAccessedLocations.diff(partiallyAccessedLocations).asInstanceOf[Set[MemoryRegion]], partiallyAccessedLocations.asInstanceOf[Set[MemoryRegion]]) - } - - private def RemoveLowerBounds(vs: ValueSet): ValueSet = { - val newIntervals = for { - a <- vs.intervals - } yield StridedInterval(a.s, BitVecLiteral(BigInt(0), a.ub.size), a.ub) - ValueSet(newIntervals) - } - - private def RemoveUpperBounds(vs: ValueSet): ValueSet = { - val newIntervals = for { - a <- vs.intervals - } yield StridedInterval(a.s, a.lb, BitVecLiteral(BigInt(Long.MaxValue), a.lb.size)) - ValueSet(newIntervals) - } - - private def joinValueSets(vs1: ValueSet, vs2: ValueSet): ValueSet = { - vs1.union(vs2) - } - - private def meetValueSets(vs1: ValueSet, vs2: ValueSet): ValueSet = { - vs1.intersect(vs2) - } - - def AbstractTransformer(in: AbsEnv, instruction: CFGPosition): AbsEnv = { - instruction match { - case p: Procedure => in - case b: Block => in - case c: Command => - c match - case statement: Statement => - statement match - case localAssign: LocalAssign => - localAssign.rhs match - case binOp: BinaryExpr => - if (binOp.arg1.isInstanceOf[Variable]) { - val R1 = localAssign.lhs - val R2 = binOp.arg1.asInstanceOf[Variable] - val c = evaluateExpression(binOp.arg2, constantPropResult(instruction)) - - // R1 = R2 + c - val out = in - val vs_R2: ValueSet = in.env1.getOrElseUpdate(R2, ValueSetLattice.BOTTOM) - out.env1(R1) = vs_R2 + c.get - out - } else { - in - } - case memoryLoad: MemoryLoad => - memoryLoad.index match - case binOp: BinaryExpr => - if (binOp.arg2.isInstanceOf[Variable]) { - val R1 = localAssign.lhs - val R2 = binOp.arg1.asInstanceOf[Variable] // TODO: Is R2 always a variable? 
- val out = in - getDefinition(binOp.arg2.asInstanceOf[Variable], instruction, reachingDefs).foreach { - d => - d.rhs match - case binOp2: BinaryExpr => - val c1 = evaluateExpression(binOp2.arg1, constantPropResult(instruction)) - val c2 = evaluateExpression(binOp2.arg2, constantPropResult(instruction)) - // R1 = *(R2 + c1) + c2 - val vs_R2: ValueSet = in.env1(R2) - val s = memoryLoad.size // s is the size of dereference performed by the instruction - val (f: Set[MemoryRegion], p: Set[MemoryRegion]) = dereference(vs_R2 + c1.get, BigInt(s)) - println("VSA") - println(f) - if (p.isEmpty) { - val vs_rhs = f.map(in.env1(_)).reduce(joinValueSets) - out.env1(R1) = vs_rhs + c2.get - } else { - out.env1(R1) = ValueSetLattice.TOP - } - case _ => - } - out - } else { - in - } - case _ => in // TODO: Handle other cases - case variable: Variable => - val R1 = localAssign.lhs - val R2 = variable - // R1 >= R2 - val out = in - val vs_R1 = in.env1.getOrElseUpdate(R1, ValueSetLattice.BOTTOM) - val vs_R2 = in.env1(R2) - val vs_lb = RemoveUpperBounds(vs_R2) - val vs_ub = RemoveLowerBounds(vs_R1) - out.env1(R1) = vs_R1.intersect(vs_lb) - out.env1(R2) = vs_R2.intersect(vs_ub) - out - case bitVecLiteral: BitVecLiteral => - val R1 = localAssign.lhs - val c = bitVecLiteral - // R1 <= c - val vs_c = ValueSet(Set(StridedInterval(smt_gcd(BitVecLiteral(BigInt(0), c.size), c), BitVecLiteral(BigInt(0), c.size), c))) // TODO: Fix ME - val out = in - out.env1(R1) = meetValueSets(in.env1(R1), vs_c) - out - case _ => in // TODO: Handle other cases - case memoryAssign: MemoryAssign => in // TODO: *(R1 + c1) = R2 + c2 - case nop: NOP => in - case assert: Assert => in - case assume: Assume => in - case jump: Jump => in - } - } - - def IntraProceduralVSA(): mutable.Map[CFGPosition, AbsEnv] = { - val worklist = new mutable.Queue[CFGPosition]() - worklist.enqueue(program.mainProcedure) - val allStackRegions: Set[StackRegion] = mmm.getAllStackRegions() - val allDataRegions: Set[DataRegion] = mmm.getAllDataRegions() - val allHeapRegions: Set[HeapRegion] = mmm.getAllHeapRegions() - - val allocatedStackRegions = AlocEnv(allStackRegions) - val allocatedDataRegions = AlocEnv(allDataRegions) - val allocatedHeapRegions = AlocEnv(allHeapRegions) - - val stackManyToOne = allStackRegions.map(r => r -> allocatedStackRegions).toMap - val dataManyToOne = allDataRegions.map(r => r -> allocatedDataRegions).toMap - val heapManyToOne = allHeapRegions.map(r => r -> allocatedHeapRegions).toMap - - val combinedMap = stackManyToOne ++ dataManyToOne ++ heapManyToOne - val flagsToMaybe = Flag.values.map(f => f -> Bool3.Maybe).toMap - - val absEnv_enter = AbsEnv(mutable.Map().withDefault(_ => ValueSetLattice.BOTTOM), mutable.Map() ++ combinedMap, mutable.Map() ++ flagsToMaybe) - val abstractStates = mutable.Map[CFGPosition, AbsEnv](worklist.head -> absEnv_enter) - while(worklist.nonEmpty) { - val n: CFGPosition = worklist.dequeue() - val m = IntraProcIRCursor.succ(n) - for (succ <- m) { - mmm.popContext() - mmm.pushContext(IRWalk.procedure(n).name) - val edge_amc = AbstractTransformer(abstractStates(n), succ) - Propagate(succ, edge_amc) - } - } - - def Propagate(n: CFGPosition, edge_amc: AbsEnv): Unit = { - if (!abstractStates.contains(n)) { - abstractStates(n) = edge_amc - worklist.enqueue(n) - } else { - val oldEnv = abstractStates(n) - val newEnv = oldEnv.join(edge_amc) - if (newEnv != oldEnv) { - abstractStates(n) = newEnv - worklist.enqueue(n) - } - } - } - abstractStates - } -} +//package analysis +//import ir.* +//import util.* +// +//import 
scala.collection.mutable +//import analysis.BitVectorEval.* +//import analysis.* +// +//class ActualVSA(program: Program, +// constantPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], +// reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], +// mmm: MemoryModelMap) { +// +// // TODO: This assumes no function is called Data or Heap (should be a tuple instead) +// val DATA_REGION_NAME = "Data" +// val HEAP_REGION_NAME = "Heap" +// +// val valueSetLattice: ValueSetLattice[MemRgn] = ValueSetLattice() +// val flagLattice: FlagLattice = FlagLattice() +// +// type MemRgn = String // all record titles +// type aaloc = MemoryRegion +// +// val MEMORY_REGIONS: List[MemRgn] = (Set(DATA_REGION_NAME, HEAP_REGION_NAME) ++ mmm.getAllocsPerProcedure.keySet).toList.sorted +// val ALLOCS: Map[String, Set[aaloc]] = mmm.getAllocsPerProcedure.asInstanceOf[Map[String, Set[aaloc]]] ++ Map("Data" -> mmm.getAllDataRegions.asInstanceOf[Set[aaloc]], "Heap" -> mmm.getAllHeapRegions.asInstanceOf[Set[aaloc]]) +// +// +// private val stackPointer = Register("R31", BitVecType(64)) +//// /** +//// * ∗(vs, s): Returns a pair of sets (F, P). F represents the set of “fully accessed” a-locs: it +//// * consists of the a-locs that are of size s and whose starting addresses are in vs. P represents +//// * the set of “partially accessed” a-locs: it consists of (i) a-locs whose starting addresses are in +//// * vs but are not of size s, and (ii) a-locs whose addresses are in vs but whose starting addresses +//// * and sizes do not meet the conditions to be in F. [Reference VSA paper] +//// * +//// * @param vsR2 +//// * @param s size of the dereference +//// * @return +//// */ +//// def dereference(s: BigInt): (Set[MemoryRegion], Set[MemoryRegion]) = { +//// // TODO: Global memory size can be retrieved from the symbol table and are of size s +//// // Map addresses to exact memory locations +//// val fullyAccessedLocations = stridedInterval.gamma.flatMap(address => mmm.findStackFullAccessesOnly(address.value, s)) +//// +//// // Identify partially accessed locations (if any) +//// val partiallyAccessedLocations = stridedInterval.gamma.flatMap(address => mmm.findStackPartialAccessesOnly(address.value, s)) +//// +//// // Return the set of fully accessed locations and the set of partially accessed locations +//// (fullyAccessedLocations.diff(partiallyAccessedLocations).asInstanceOf[Set[MemoryRegion]], partiallyAccessedLocations.asInstanceOf[Set[MemoryRegion]]) +//// } +//// } +// +// /** +// * Allocs Structure +// * Procedures +// * main -> {alloc1, alloc2, alloc3} +// * foo -> {alloc4, alloc5} +// * Data +// * Data -> {alloc6, alloc7} +// * Heap +// * Heap -> {alloc8, alloc9} +// */ +// case class AbsEnv(): +// var regEnv: mutable.Map[Variable, ValueSet[MemRgn]] = mutable.Map[Variable, ValueSet[MemRgn]]().withDefault(_ => valueSetLattice.bottom) +// var alocEnv: mutable.Map[aaloc, ValueSet[MemRgn]] = mutable.Map[aaloc, ValueSet[MemRgn]]().withDefault(_ => valueSetLattice.bottom) +// var flagEnv: Flag = FlagMap(Map[Flags, Bool3]()) +// +// def join(absEnv: AbsEnv): AbsEnv = { +// val out = AbsEnv() +// // unify regs +// absEnv.regEnv.foreach((k, v) => +// if (regEnv.contains(k)) { +// out.regEnv(k) = valueSetLattice.lub(regEnv(k), v) +// } else { +// out.regEnv(k) = v +// }) +// // unify alocs +// absEnv.alocEnv.foreach((k, v) => +// if (alocEnv.contains(k)) { +// out.alocEnv(k) = valueSetLattice.lub(alocEnv(k), v) +// } else { +// out.alocEnv(k) = v +// }) +// // 
unify flags +// out.flagEnv = flagLattice.lub(flagEnv, absEnv.flagEnv) +// out +// } +// +// override def toString: String = { +// val regEnvStr = regEnv.map((k, v) => s"$k -> $v").mkString("\n") +// val alocEnvStr = alocEnv.map((k, v) => s"$k -> $v").mkString("\n") +// val flagEnvStr = flagEnv.toString +// s"RegEnv:\n$regEnvStr\nAlocEnv:\n$alocEnvStr\nFlagEnv:\n$flagEnvStr" +// } +// +// // TODO: This is not very accurate and would need a better pattern matching +// def exprToRegion(expr: Expr, n: CFGPosition): Option[MemoryRegion] = { +// expr match { +// case binOp: BinaryExpr if binOp.arg1 == stackPointer => +// evaluateExpression(binOp.arg2, constantPropResult(n)) match { +// case Some(b: BitVecLiteral) => mmm.findStackObject(b.value) +// case None => None +// } +// case _ => +// evaluateExpression(expr, constantPropResult(n)) match { +// case Some(b: BitVecLiteral) => mmm.findDataObject(b.value) +// case None => None +// } +// } +// } +// +// def evaluateValueSet(expr: Expr, absEnv: AbsEnv, n: CFGPosition): ValueSet[MemRgn] = { +// expr match +// case literal: Literal => +// literal match +// case lit: BoolLit => ??? +// case BitVecLiteral(value, size) => +// val si = valueSetLattice.lattice.singletonSI(value, size) +// val memoryRegionMap = MEMORY_REGIONS.map(i => i -> si).toMap +// VS(memoryRegionMap) +// case IntLiteral(value) => ??? +// case Extract(end, start, body) => ??? +// case Repeat(repeats, body) => ??? +// case ZeroExtend(extension, body) => ??? +// case SignExtend(extension, body) => ??? +// case UnaryExpr(op, arg) => +// arg match { +// case v1: Variable => +// val VS_v1 = absEnv.regEnv(v1) +// valueSetLattice.applyOp(op, VS_v1) +// case _ => valueSetLattice.applyOp(op, evaluateValueSet(arg, absEnv, n)) +// } +// case BinaryExpr(op, arg1, arg2) => +// (arg1, arg2) match { +// case (v1: Variable, v2: Variable) => +// val VS_v1 = absEnv.regEnv(v1) +// val VS_v2 = absEnv.regEnv(v2) +// valueSetLattice.applyOp(op, VS_v1, Left(VS_v2)) +// case (v1: Variable, c: BitVecLiteral) => +// val VS_v1 = absEnv.regEnv(v1) +// valueSetLattice.applyOp(op, VS_v1, Right(c)) +// case (c: BitVecLiteral, v1: Variable) => +// val VS_v1 = absEnv.regEnv(v1) +// valueSetLattice.applyOp(op, VS_v1, Right(c)) +// case _ => +// val VS_arg1 = evaluateValueSet(arg1, absEnv, n) +// val VS_arg2 = evaluateValueSet(arg2, absEnv, n) +// valueSetLattice.applyOp(op, VS_arg1, Left(VS_arg2)) +// } +// case MemoryStore(mem, index, value, endian, size) => ??? +// case MemoryLoad(mem, index, endian, size) => +// val region = exprToRegion(index, n) +// if (region.isDefined) { +// absEnv.alocEnv(region.get) +// } else { +// valueSetLattice.bottom +// } +// case Memory(name, addressSize, valueSize) => ??? +// case variable: Variable => absEnv.regEnv(variable) +// } +// +// /** Default implementation of eval. 
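+//    * (Sketch only: the branches below still accumulate into a map `m` carried over from the
+//    * older map-based eval, so this commented-out version is illustrative rather than compilable.)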
+// */ +// def eval(cmd: Command, s: AbsEnv, n: CFGPosition): Map[Variable | MemoryRegion, Set[Value]] = { +// Logger.debug(s"eval: $cmd") +// Logger.debug(s"state: $s") +// Logger.debug(s"node: $n") +// cmd match +// case localAssign: LocalAssign => +// localAssign.rhs match +// case memoryLoad: MemoryLoad => +// exprToRegion(memoryLoad.index, n) match +// case Some(r: MemoryRegion) => +// // this is an exception to the rule and only applies to data regions +// evaluateExpression(memoryLoad.index, constantPropResult(n)) match +// case Some(bitVecLiteral: BitVecLiteral) => +// m = m + (r -> Set(getValueType(bitVecLiteral))) +// m = m + (localAssign.lhs -> m(r)) +// m +// +// val vs_r1 = s.regEnv(localAssign.lhs) +// val singleton = Set(getValueType(bitVecLiteral)) +// +// valueSetLattice.lub() +// +// case None => +// m = m + (localAssign.lhs -> m(r)) +// m +// +// +// case None => +// Logger.warn("could not find region for " + localAssign) +// m +// case e: Expr => +// evaluateExpression(e, constantPropResult(n)) match { +// case Some(bv: BitVecLiteral) => +// m = m + (localAssign.lhs -> Set(getValueType(bv))) +// m +// case None => +// Logger.warn("could not evaluate expression" + e) +// m +// } +// case memAssign: MemoryAssign => +// memAssign.rhs.index match +// case binOp: BinaryExpr => +// val region: Option[MemoryRegion] = exprToRegion(binOp, n) +// region match +// case Some(r: MemoryRegion) => +// val storeValue = memAssign.rhs.value +// evaluateExpression(storeValue, constantPropResult(n)) match +// case Some(bitVecLiteral: BitVecLiteral) => +// m = m + (r -> Set(getValueType(bitVecLiteral))) +// m +// /* +// // TODO constant prop returned BOT OR TOP. Merge regions because RHS could be a memory loaded address +// case variable: Variable => +// s + (r -> s(variable)) +// */ +// case None => +// storeValue.match { +// case v: Variable => +// m = m + (r -> m(v)) +// m +// case _ => +// Logger.warn(s"Too Complex: $storeValue") // do nothing +// m +// } +// case None => +// Logger.warn("could not find region for " + memAssign) +// m +// case _ => +// m +// case _ => +// m +// } +// +// def AbstractTransformer(in: AbsEnv, n: CFGPosition): AbsEnv = { +// if (IRWalk.procedure(n) == n) { +// mmm.pushContext(n.asInstanceOf[Procedure].name) +// in +// } else if (IRWalk.procedure(n).end == n) { +// mmm.popContext() +// in +// } else n match +// case command: Command => +// eval(command, in, n) +// case _ => +// in +//// instruction match { +//// case p: Procedure => in +//// case b: Block => in +//// case c: Command => +//// c match +//// case statement: Statement => +//// statement match +//// case localAssign: LocalAssign => +//// localAssign.rhs match +//// case binOp: BinaryExpr => +//// if (binOp.arg1.isInstanceOf[Variable]) { +//// val R1 = localAssign.lhs +//// val R2 = binOp.arg1.asInstanceOf[Variable] +//// val c = evaluateExpression(binOp.arg2, constantPropResult(instruction)) +//// if (c.isDefined) { +//// +//// // R1 = R2 + c +//// val out = in +//// val vs_R2: ValueSet[MemRgn] = in.regEnv(R2) +//// out.regEnv(R1) = valueSetLattice.add(vs_R2, c.get) +//// return out +//// } +//// } +//// in +//// case memoryLoad: MemoryLoad => +//// memoryLoad.index match +//// case binOp: BinaryExpr => +//// if (binOp.arg2.isInstanceOf[Variable]) { +//// val R1 = localAssign.lhs +//// val R2 = binOp.arg1.asInstanceOf[Variable] // TODO: Is R2 always a variable? 
+//// val out = in +//// getDefinition(binOp.arg2.asInstanceOf[Variable], instruction, reachingDefs).foreach { +//// d => +//// d.rhs match +//// case binOp2: BinaryExpr => +//// val c1 = evaluateExpression(binOp2.arg1, constantPropResult(instruction)) +//// val c2 = evaluateExpression(binOp2.arg2, constantPropResult(instruction)) +//// // R1 = *(R2 + c1) + c2 +//// val vs_R2: ValueSet[String] = in.regEnv(R2) +//// val s = memoryLoad.size // s is the size of dereference performed by the instruction +//// val (f: Set[MemoryRegion], p: Set[MemoryRegion]) = valueSetLattice.dereference(BigInt(s), vs_R2, mmm) +//// if (p.isEmpty) { +//// val vs_rhs = f.map(r => in.alocEnv(r)).foldLeft(valueSetLattice.bottom)((a, b) => valueSetLattice.lub(a, b)) +//// out.regEnv(R1) = valueSetLattice.add(vs_rhs, c2.get) +//// } else { +//// out.regEnv(R1) = valueSetLattice.top +//// } +//// case _ => +//// } +//// out +//// } else { +//// in +//// } +//// case _ => in // TODO: Handle other cases +//// case variable: Variable => in +////// val R1 = localAssign.lhs +////// val R2 = variable +////// // R1 >= R2 +////// val out = in +////// val vs_R1 = in.env1.getOrElseUpdate(R1, ValueSetLattice.BOTTOM) +////// val vs_R2 = in.env1(R2) +////// val vs_lb = vs_R2.removeUpperBounds() +////// val vs_ub = vs_R1.removeLowerBounds() +////// out.env1(R1) = vs_R1.meet(vs_lb) +////// out.env1(R2) = vs_R2.meet(vs_ub) +////// out +//// case bitVecLiteral: BitVecLiteral => in +////// val R1 = localAssign.lhs +////// val c = bitVecLiteral +////// // R1 <= c +////// // from 0 to c, all value sets are possible (ie. stack, global) TODO: this may be wrong because of the _ join _? +////// val interval = bitVec_interval(BitVecLiteral(0, c.size), c, BitVecLiteral(1, c.size)) +////// val regions: mutable.Set[MemoryRegion] = mutable.Set() +////// println(c) +////// interval.foreach(v => +////// val dataObject = mmm.findDataObject(v.value) +////// if dataObject.isDefined then regions.add(dataObject.get) +////// ) +////// TOP_STRIDE.gamma.map(v => regions.add(mmm.findStackObject(v.value).get)) +////// +////// val allValueSets: mutable.Set[ValueSet] = mutable.Set() +////// regions.foreach(r => allValueSets.add(in.env2(r).getAAlloc(r).valueSet)) +////// val vs_c = allValueSets.fold(ValueSetLattice.BOTTOM)(_ join _) +////// val out = in +////// out.env1(R1) = in.env1(R1).meet(vs_c) +////// out +//// +////// val vs_c = ValueSet(Set(StridedInterval(smt_gcd(BitVecLiteral(BigInt(0), c.size), c), BitVecLiteral(BigInt(0), c.size), c))) // TODO: Fix ME +////// val out = in +////// out.env1(R1) = in.env1(R1).meet(vs_c) +////// out +//// case _ => in // TODO: Handle other cases +//// case memoryAssign: MemoryAssign => +//// val out = in +//// // TODO: *(R1 + c1) = R2 + c2 +//// memoryAssign.rhs.index match { +//// case binaryExpr: BinaryExpr => +//// binaryExpr.arg2 match { +//// case bitVecLiteral: BitVecLiteral => +//// memoryAssign.rhs.value match { +//// case binaryExprRHS: BinaryExpr => +//// binaryExprRHS.arg2 match { +//// case bitVecLiteralRHS: BitVecLiteral => +//// val R1 = binaryExpr.arg1.asInstanceOf[Variable] +//// val c1 = bitVecLiteral +//// val R2 = binaryExprRHS.arg1.asInstanceOf[Variable] +//// val c2 = bitVecLiteralRHS +//// +//// val vs_R1: ValueSet[MemRgn] = in.regEnv(R1) +//// val vs_R2: ValueSet[MemRgn] = in.regEnv(R2) +//// val proc: Procedure = IRWalk.procedure(instruction) +//// val (f: Set[MemoryRegion], p: Set[MemoryRegion]) = valueSetLattice.dereference(BigInt(memoryAssign.lhs.valueSize), valueSetLattice.add(vs_R1, c1), mmm) 
+//// +//// if (f.size == 1 && p.size == 0) { // TODO: must check if f has no heap or recursive proc aalocs +//// out.alocEnv(f.head) = valueSetLattice.add(vs_R2, c2) // strong update +//// } else { +//// f.foreach(v => out.alocEnv(v) = valueSetLattice.lub(out.alocEnv(v), valueSetLattice.add(vs_R2, c2))) // weak update +//// } +//// p.foreach(v => out.alocEnv(v) = valueSetLattice.top) // Set partial accesses to top +//// case _ => +//// } +//// case _ => +//// } +//// case _ => // TODO: Should we evaluate here? +//// } +//// case _ => // // TODO: Should we evaluate here? +//// } +//// out +//// case nop: NOP => in +//// case assert: Assert => in +//// case assume: Assume => in +//// case jump: Jump => in +//// } +// } +// +// def IntraProceduralVSA(): mutable.Map[CFGPosition, AbsEnv] = { +// val worklist = new mutable.Queue[CFGPosition]() +// worklist.enqueue(program.mainProcedure) +// +// val absEnv_enter = AbsEnv() +// val abstractStates = mutable.Map[CFGPosition, AbsEnv](worklist.head -> absEnv_enter) +// while(worklist.nonEmpty) { +// val n: CFGPosition = worklist.dequeue() +// val m = IntraProcIRCursor.succ(n) +// for (succ <- m) { +// mmm.popContext() +// mmm.pushContext(IRWalk.procedure(n).name) +// val edge_amc = AbstractTransformer(abstractStates(n), succ) +// Propagate(succ, edge_amc) +// } +// } +// +// def Propagate(n: CFGPosition, edge_amc: AbsEnv): Unit = { +// if (!abstractStates.contains(n)) { +// abstractStates(n) = edge_amc +// worklist.enqueue(n) +// } else { +// val oldEnv = abstractStates(n) +// val newEnv = oldEnv.join(edge_amc) +// if (newEnv != oldEnv) { +// abstractStates(n) = newEnv +// worklist.enqueue(n) +// } +// } +// } +// abstractStates +// } +//} diff --git a/src/main/scala/analysis/ActualVSAold.scala b/src/main/scala/analysis/ActualVSAold.scala new file mode 100644 index 000000000..2de052c1c --- /dev/null +++ b/src/main/scala/analysis/ActualVSAold.scala @@ -0,0 +1,60 @@ +//package analysis +// +//import ir.* +//import analysis.solvers._ +// +//import scala.collection.immutable +// +//trait ActualVSA(program: Program) { +// +// val powersetLattice: PowersetLattice[Variable] = PowersetLattice() +// +// val lattice: MapLattice[CFGPosition, Set[Variable], PowersetLattice[Variable]] = MapLattice(powersetLattice) +// +// val domain: Set[CFGPosition] = Set.empty ++ program +// +// private val stackPointer = Register("R31", BitVecType(64)) +// private val linkRegister = Register("R30", BitVecType(64)) +// private val framePointer = Register("R29", BitVecType(64)) +// +// private val ignoreRegions: Set[Expr] = Set(linkRegister, framePointer, stackPointer) +// +// /** Default implementation of eval. +// */ +// def eval(cmd: Command, s: Set[Variable]): Set[Variable] = { +// var m = s +// cmd match { +// case assume: Assume => +// m.diff(assume.body.variables) +// case assert: Assert => +// m.diff(assert.body.variables) +// case memoryAssign: MemoryAssign => +// m.diff(memoryAssign.lhs.variables ++ memoryAssign.rhs.variables) +// case indirectCall: IndirectCall => +// m - indirectCall.target +// case localAssign: LocalAssign => +// m = m.diff(localAssign.rhs.variables) +// if ignoreRegions.contains(localAssign.lhs) then m else m + localAssign.lhs +// case _ => +// m +// } +// } +// +// /** Transfer function for state lattice elements. 
+// */ +// def localTransfer(n: CFGPosition, s: Set[Variable]): Set[Variable] = n match { +// case cmd: Command => +// eval(cmd, s) +// case _ => s // ignore other kinds of nodes +// } +// +// /** Transfer function for state lattice elements. +// */ +// def transfer(n: CFGPosition, s: Set[Variable]): Set[Variable] = localTransfer(n, s) +//} +// +//class ANRAnalysisSolver(program: Program) extends ANRAnalysis(program) +// with IRIntraproceduralForwardDependencies +// with Analysis[Map[CFGPosition, Set[Variable]]] +// with SimpleWorklistFixpointSolver[CFGPosition, Set[Variable], PowersetLattice[Variable]] { +//} \ No newline at end of file diff --git a/src/main/scala/analysis/Analysis.scala b/src/main/scala/analysis/Analysis.scala index 9ab736160..7731f3798 100644 --- a/src/main/scala/analysis/Analysis.scala +++ b/src/main/scala/analysis/Analysis.scala @@ -164,10 +164,10 @@ trait ConstantPropagationWithSSA(val program: Program, val reachingDefs: Map[CFG // assignments case a: Assign => val lhsWrappers = s.collect { - case (k, v) if RegisterVariableWrapper(k.variable, k.assigns) == RegisterVariableWrapper(a.lhs, getDefinition(a.lhs, r, reachingDefs)) => (k, v) + case (k, v) if RegisterWrapperPartialEquality(k.variable, k.assigns) == RegisterWrapperPartialEquality(a.lhs, getDefinition(a.lhs, r, reachingDefs)) => (k, v) } if (lhsWrappers.nonEmpty) { - s ++ lhsWrappers.map((k, v) => (k, v.union(eval(a.rhs, s, r)))) + s ++ lhsWrappers.map((k, v) => (RegisterWrapperEqualSets(k.variable, k.assigns ++ getDefinition(a.lhs, r, reachingDefs)), v.union(eval(a.rhs, s, r)))) } else { s + (RegisterWrapperEqualSets(a.lhs, getDefinition(a.lhs, r, reachingDefs)) -> eval(a.rhs, s, n)) } @@ -190,5 +190,5 @@ trait ConstantPropagationWithSSA(val program: Program, val reachingDefs: Map[CFG class ConstantPropagationSolverWithSSA(program: Program, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]) extends ConstantPropagationWithSSA(program, reachingDefs) with SimplePushDownWorklistFixpointSolver[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]], MapLattice[RegisterWrapperEqualSets, Set[BitVecLiteral], ConstantPropagationLatticeWithSSA]] - with IRIntraproceduralForwardDependencies + with IRInterproceduralForwardDependencies with Analysis[Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]]] diff --git a/src/main/scala/analysis/BACKUPLAttice b/src/main/scala/analysis/BACKUPLAttice new file mode 100644 index 000000000..b68fae9c1 --- /dev/null +++ b/src/main/scala/analysis/BACKUPLAttice @@ -0,0 +1,834 @@ +package analysis + +import ir._ +import analysis.BitVectorEval._ +import util.Logger +import math.pow + +/** Basic lattice + */ +trait Lattice[T]: + + type Element = T + /** The bottom element of this lattice. + */ + val bottom: T + + /** The top element of this lattice. Default: not implemented. + */ + def top: T = ??? + + /** The least upper bound of `x` and `y`. + */ + def lub(x: T, y: T): T + + /** Returns true whenever `x` <= `y`. 
+ */ + def leq(x: T, y: T): Boolean = lub(x, y) == y // rarely used, but easy to implement :-) + +//trait StridedInterval[+T] +// +//case class SI[T](s: T, l: T, u: T) extends StridedInterval[T] { +// override def toString = s"SI $s [$l, $u]" +//} +// +//case object SIBottom extends StridedInterval[BitVecLiteral] { +// override def toString = "SIBot" +//} + +///** +// * SI class that represents a strided interval +// * s is the stride +// * l is the lower bound +// * u is the upper bound +// * [l, u] is the interval +// * [l, u] \ s is the set of values +// * 0[l,l] represents the singleton set {l} +// */ +//class StridedIntervalLattice extends Lattice[StridedInterval[BitVecLiteral]] { +// val lowestPossibleValue: BitVecLiteral = BitVecLiteral(0, 64) +// val highestPossibleValue: BitVecLiteral = BitVecLiteral(Long.MaxValue - 1, 64) +// +// override val bottom: StridedInterval[BitVecLiteral] = SIBottom +// override def top: StridedInterval[BitVecLiteral] = SI(BitVecLiteral(1, 64), lowestPossibleValue, highestPossibleValue) +// +// def gamma(x: StridedInterval[BitVecLiteral]): Set[BitVecLiteral] = x match { +// case SIBottom => Set.empty +// case SI(s, l, u) => +// bitVec_interval(l, u, s) +// } +// +// /** S1[L1, U1] join S2[L2, U2] -> gcd(S1, S2)[min(L1, L2), max(U1, U2)] */ +// override def lub(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// (x, y) match { +// case (SIBottom, t) => t +// case (t, SIBottom) => t +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// SI(bitVec_gcd(s1, s2), bitVec_min(l1, l2), bitVec_max(u1, u2)) +// } +// } +// +// /** S1[L1, U1] meet S2[L2, U2] -> gcd(S1, S2)[max(L1, L2), min(U1, U2)] */ +// def meet(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// (x, y) match { +// case (SIBottom, t) => SIBottom +// case (t, SIBottom) => SIBottom +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// SI(bitVec_gcd(s1, s2), bitVec_max(l1, l2), bitVec_min(u1, u2)) +// } +// } +// +// /** Addition +// * Addition defined in page 6 Figure 2 of: https://dl.acm.org/doi/pdf/10.1145/1111542.1111560 +// * */ +// def add(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// (x, y) match { +// case (SIBottom, t) => t +// case (t, SIBottom) => t +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// val lbound = smt_bvadd(l1, l2) +// val ubound = smt_bvadd(u1, u2) +// val s = bitVec_gcd(s1, s2) +// if (smt_bvsle(ubound, highestPossibleValue) == TrueLiteral && smt_bvsge(lbound, lowestPossibleValue) == TrueLiteral) { +// SI(s, lbound, ubound) +// } else { +// throw new IllegalArgumentException(s"Addition overflow: $lbound, $ubound") +// } +// } +// } +// +// /** Unary Minus */ +// def unaryMinus(x: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// x match { +// case SIBottom => SIBottom +// case SI(s, l, u) => +// if (smt_bvcomp(l, u) == BitVecLiteral(1, 1) && (smt_bvcomp(l, lowestPossibleValue) == BitVecLiteral(1, 1) && smt_bvcomp(u, lowestPossibleValue) == BitVecLiteral(1, 1))) { +// SI(BitVecLiteral(0, 64), lowestPossibleValue, lowestPossibleValue) +// } else if (smt_bvcomp(l, lowestPossibleValue) == BitVecLiteral(0, 1)) { +// SI(s, smt_bvneg(u), smt_bvneg(l)) +// } +// else { +// SI(BitVecLiteral(1, 64), lowestPossibleValue, highestPossibleValue) +// } +// } +// } +// +// /** Substraction */ +// def sub(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): 
StridedInterval[BitVecLiteral] = { +// add(x, unaryMinus(y)) +// } +// +// /** Widen */ +// def widen(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// /* formula for widening: +// given: s1[lb1, ub1] and s2[lb2, ub2] +// then: gcd(s1, s2)[min(lb1, lb2), max(ub1, ub2)] +// where: min(lb1, lb2) = lb1 if lb1 <= lb2 +// and: min(lb1, lb2) = minPossibleValue otherwise +// where: max(ub1, ub2) = ub1 if ub1 >= ub2 +// and: max(ub1, ub2) = maxPossibleValue otherwise +// +// assuming: +// minPossibleValue = lowestPossibleValue +// maxPossibleValue = highestPossibleValue + (lb - 1) mod s +// */ +// (x, y) match { +// case (SIBottom, t) => ??? +// case (t, SIBottom) => ??? +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// val s = bitVec_gcd(s1, s2) +// val l = if (smt_bvule(l1, l2) == TrueLiteral) l1 else lowestPossibleValue +// val u = if (smt_bvuge(u1, u2) == TrueLiteral) u1 else smt_bvsmod(smt_bvadd(highestPossibleValue, smt_bvsub(l1, BitVecLiteral(1, 64))), s) +// SI(s, l, u) +// } +// } +// +// /** +// * Calculating strided interval for a list of values using accumulative gcd. +// * @param x the list of values +// * @return the strided interval representing the values in the list +// */ +// def valuesToSI(x: List[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// if (x.isEmpty) { +// SIBottom +// } else { +// val l = bitVec_min(x) +// val u = bitVec_max(x) +// val initialStride = smt_bvsub(u, l) +// val stride = x.foldLeft(initialStride) { +// case (acc, v) => bitVec_gcd(smt_bvsub(v, l), acc) +// } +// SI(stride, l, u) +// } +// } +//} + + +trait StridedWrappedInterval + +case class SI(s: BigInt, l: BigInt, u: BigInt, w: BigInt) extends StridedWrappedInterval { + if (l == u) { + require(s == 0) + } + override def toString = s"SASI $s [$l, $u] $w" +} + +case object SIBottom extends StridedWrappedInterval { + override def toString = "SASIBot" +} + +// TOP is 1[0^w, 1^w]w +case object SITop extends StridedWrappedInterval { + override def toString = "SASITop" +} + +class SASILattice extends Lattice[StridedWrappedInterval] { + val lowestPossibleValue: BigInt = 0 + val highestPossibleValue: BigInt = Long.MaxValue - 1 + + override val bottom: StridedWrappedInterval = SIBottom + + override def top: StridedWrappedInterval = SITop + +// def gamma(x: StridedWrappedInterval): Set[BitVecLiteral] = x match { +// case SIBottom => Set.empty +// case SI(s, l, u, w) => +// if (s == BitVecLiteral(0, 64)) { // singleton set +// Set(l) +// } else { +// bitVec_interval(l, u, s) +// } +// } + + def isSingleValue(x: StridedWrappedInterval): Boolean = x match { + case SI(s, l, u, w) => s == 0 && l == u + case _ => false + } + + def modularPlus(a: BigInt, b: BigInt, w: BigInt): BigInt = { + (a + b) mod BigInt(2).pow(w.toInt) + } + + def modularMinus(a: BigInt, b: BigInt, w: BigInt): BigInt = { + (a - b) mod BigInt(2).pow(w.toInt) + } + + def modularLEQ(a: BigInt, b: BigInt, x: BigInt, w: BigInt): Boolean = { + modularMinus(a, x, w) <= modularMinus(b, x, w) + } + + def membershipFunction(v: BigInt, r: StridedWrappedInterval): Boolean = { + r match { + case SIBottom => false + case SITop => true + case SI(sr, lb, ub, w) => + modularLEQ(v, ub, lb, w) && (modularMinus(v, lb, w) mod sr) == 0 + } + } + + def cardinalityFunction(r: StridedWrappedInterval, w: BigInt): BigInt = { + r match { + case SIBottom => 0 + case SITop => BigInt(2).pow(w.toInt) + case SI(sr, lb, ub, w) => ((ub - lb + 1) / sr) // TODO: this may need to be a math.floor operation + } + } + + def 
orderingOperator(r: StridedWrappedInterval, t: StridedWrappedInterval): Boolean = { + if (r == SITop && t != SITop) { + false + } else if (r == SIBottom || t == SITop) { + true + } else { + (r, t) match { + case (SI(sr, a, b, w1), SI(st, c, d, w2)) => + if ((a == c) && (b == d) && ((sr mod st) == 0)) { + return true + } + membershipFunction(a, t) && membershipFunction(b, t) && (!membershipFunction(c, r) || !membershipFunction(d, r)) && ((a - c) mod st) == 0 && (sr mod st) == 0 + case _ => false + } + } + } + + /** S1[L1, U1] join S2[L2, U2] -> gcd(S1, S2)[min(L1, L2), max(U1, U2)] */ + override def lub(r: StridedWrappedInterval, t: StridedWrappedInterval): StridedWrappedInterval = { +// (s, t) match { +// case (SIBottom, t) => t +// case (t, SIBottom) => t +// case (SI(a, b, u1, w1), SI(s2, c, d, w2)) => +// var u: BigInt = 0 +// var l: BigInt = 0 +// if (isSingleValue(s) && isSingleValue(t)) { +// val si1_card = WCardMod() +// val si2_card = WCardMod() +// if (si1_card <= si2_card) { +// l = a +// u = d +// } else { +// l = c +// u = b +// } +// +// SI(u - l, l, u, ) +// } +// } + + (r, t) match { + case (SI(sr, a, b, w1), SI(st, c, d, w2)) => + assert(w1 == w2) + val w = w1 // TODO: should this be the largest? + if (orderingOperator(r, t)) { + return t + } + if (orderingOperator(t, r)) { + return r + } + if (membershipFunction(a, t) && membershipFunction(b, t) && membershipFunction(c, r) && membershipFunction(d, r)) { + return SITop + } + if (membershipFunction(c, r) && membershipFunction(b, t) && !membershipFunction(a, t) && !membershipFunction(d, r)) { + return SI(sr.gcd(st).gcd(modularMinus(d, a, w)), a, d, w) + } + if (membershipFunction(a, t) && membershipFunction(d, r) && !membershipFunction(c, r) && !membershipFunction(b, t)) { + return SI(sr.gcd(st).gcd(modularMinus(b, c, w)), c, b, w) + } + val sad = SI(sr.gcd(st).gcd(modularMinus(d, a, w)), a, d, w) + val scb = SI(sr.gcd(st).gcd(modularMinus(b, c, w)), c, b, w) + if (!membershipFunction(a, t) && !membershipFunction(d, r) && !membershipFunction(c, r) && !membershipFunction(b, t) && cardinalityFunction(sad, w) <= cardinalityFunction(scb, w)) { + return sad + } + return scb + case _ => ??? + } + } + + def singletonSI(v: BigInt, w: BigInt): StridedWrappedInterval = { + SI(0, v, v, w) + } + + /** + * s + t = + * BOT if s = BOT or t = BOT + * gcd(s, t)(|a +w c, b +w d|) if s = (|a, b|), t = (|c, d|) and #s + #t <= 2^w + * @param s + * @param t + * @return + */ + def add(s: StridedWrappedInterval, t: StridedWrappedInterval): StridedWrappedInterval = { + (s, t) match { + case (SIBottom, _) => SIBottom // TODO: is this correct? + case (_, SIBottom) => SIBottom // TODO: is this correct? + case (SI(ss, a, b, w1), SI(st, c, d, w2)) if (cardinalityFunction(s, w1) + cardinalityFunction(t, w2)) <= BigInt(2).pow(w1.toInt) => + assert(w1 == w2) + return SI(ss.gcd(st), modularPlus(a, c, w1), modularPlus(b, d, w1), w1) + case _ => SITop + } + } + + def add(s: StridedWrappedInterval, t: BigInt, w: BigInt): StridedWrappedInterval = { + (s, t) match { + case (SIBottom, _) => SIBottom // TODO: is this correct? 
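+      // The SI case below lifts the constant t to the singleton interval 0[t, t] of width w and
+      // defers to the SI + SI rule above; e.g. 4[0, 12] + 3 is treated as 4[0, 12] + 0[3, 3],
+      // which is intended to yield 4[3, 15] (hedged illustration; note the SI + SI guard calls
+      // cardinalityFunction, which divides by the stride, so the zero-stride singleton may need
+      // special handling).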
+ case (SI(ss, a, b, w1), t) => + return add(s, singletonSI(t, w)) + case _ => SITop + } + } + + + + +// /** S1[L1, U1] meet S2[L2, U2] -> gcd(S1, S2)[max(L1, L2), min(U1, U2)] */ +// def meet(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// (x, y) match { +// case (SIBottom, t) => SIBottom +// case (t, SIBottom) => SIBottom +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// SI(bitVec_gcd(s1, s2), bitVec_max(l1, l2), bitVec_min(u1, u2)) +// } +// } +// +// /** Addition +// * Addition defined in page 6 Figure 2 of: https://dl.acm.org/doi/pdf/10.1145/1111542.1111560 +// * */ +// def add(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// (x, y) match { +// case (SIBottom, t) => t +// case (t, SIBottom) => t +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// val lbound = smt_bvadd(l1, l2) +// val ubound = smt_bvadd(u1, u2) +// val s = bitVec_gcd(s1, s2) +// if (smt_bvsle(ubound, highestPossibleValue) == TrueLiteral && smt_bvsge(lbound, lowestPossibleValue) == TrueLiteral) { +// SI(s, lbound, ubound) +// } else { +// throw new IllegalArgumentException(s"Addition overflow: $lbound, $ubound") +// } +// } +// } +// +// /** Unary Minus */ +// def unaryMinus(x: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// x match { +// case SIBottom => SIBottom +// case SI(s, l, u) => +// if (smt_bvcomp(l, u) == BitVecLiteral(1, 1) && (smt_bvcomp(l, lowestPossibleValue) == BitVecLiteral(1, 1) && smt_bvcomp(u, lowestPossibleValue) == BitVecLiteral(1, 1))) { +// SI(BitVecLiteral(0, 64), lowestPossibleValue, lowestPossibleValue) +// } else if (smt_bvcomp(l, lowestPossibleValue) == BitVecLiteral(0, 1)) { +// SI(s, smt_bvneg(u), smt_bvneg(l)) +// } +// else { +// SI(BitVecLiteral(1, 64), lowestPossibleValue, highestPossibleValue) +// } +// } +// } +// +// /** Substraction */ +// def sub(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// add(x, unaryMinus(y)) +// } +// +// /** Widen */ +// def widen(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// /* formula for widening: +// given: s1[lb1, ub1] and s2[lb2, ub2] +// then: gcd(s1, s2)[min(lb1, lb2), max(ub1, ub2)] +// where: min(lb1, lb2) = lb1 if lb1 <= lb2 +// and: min(lb1, lb2) = minPossibleValue otherwise +// where: max(ub1, ub2) = ub1 if ub1 >= ub2 +// and: max(ub1, ub2) = maxPossibleValue otherwise +// +// assuming: +// minPossibleValue = lowestPossibleValue +// maxPossibleValue = highestPossibleValue + (lb - 1) mod s +// */ +// (x, y) match { +// case (SIBottom, t) => ??? +// case (t, SIBottom) => ??? +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// val s = bitVec_gcd(s1, s2) +// val l = if (smt_bvule(l1, l2) == TrueLiteral) l1 else lowestPossibleValue +// val u = if (smt_bvuge(u1, u2) == TrueLiteral) u1 else smt_bvsmod(smt_bvadd(highestPossibleValue, smt_bvsub(l1, BitVecLiteral(1, 64))), s) +// SI(s, l, u) +// } +// } +// +// /** +// * Calculating strided interval for a list of values using accumulative gcd. 
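+//   * For example, the values 4, 8 and 14 give l = 4, u = 14 and an accumulated stride of 2,
+//   * producing SI(2, 4, 14), whose concretisation {4, 6, 8, 10, 12, 14} over-approximates the input.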
+// * +// * @param x the list of values +// * @return the strided interval representing the values in the list +// */ +// def valuesToSI(x: List[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// if (x.isEmpty) { +// SIBottom +// } else { +// val l = bitVec_min(x) +// val u = bitVec_max(x) +// val initialStride = smt_bvsub(u, l) +// val stride = x.foldLeft(initialStride) { +// case (acc, v) => bitVec_gcd(smt_bvsub(v, l), acc) +// } +// SI(stride, l, u) +// } +// } +} + +trait ValueSet[+T] + +case class VS[T](m: Map[T, StridedWrappedInterval]) extends ValueSet[T] { + override def toString: String = m.toString +} + +case object VSBottom extends ValueSet[Nothing] { + override def toString = "VSBot" +} + +case object VSTop extends ValueSet[Nothing] { + override def toString = "VSTop" +} + +/** The lattice of integers with the standard ordering. + */ +class ValueSetLattice[T] extends Lattice[ValueSet[T]] { + + override val bottom: ValueSet[T] = VSBottom + override def top: ValueSet[T] = VSTop + + val lattice: SASILattice = SASILattice() + + override def lub(x: ValueSet[T], y: ValueSet[T]): ValueSet[T] = { + (x, y) match { + case (VSBottom, t) => t + case (t, VSBottom) => t + case (VSTop, _) => VSTop + case (_, VSTop) => VSTop + case (VS(m1), VS(m2)) => + VS(m1.keys.foldLeft(m2) { + case (acc, k) => + val v1 = m1(k) + val v2 = m2(k) + acc + (k -> lattice.lub(v1, v2)) + }) + } + } + +// def meet(x: ValueSet[String], y: ValueSet[String]): ValueSet[String] = { +// (x, y) match { +// case (VSBottom, t) => VSBottom +// case (t, VSBottom) => VSBottom +// case (VSTop, _) => y +// case (_, VSTop) => x +// case (VS(m1), VS(m2)) => +// VS(m1.keys.foldLeft(m2) { +// case (acc, k) => +// val v1 = m1(k) +// val v2 = m2(k) +// acc + (k -> lattice.meet(v1, v2)) +// }) +// } +// } + + def add(x: ValueSet[T], y: ValueSet[T]): ValueSet[T] = { + (x, y) match { + case (VSBottom, t) => t + case (t, VSBottom) => t + case (VSTop, _) => VSTop + case (_, VSTop) => VSTop + case (VS(m1), VS(m2)) => + VS(m1.keys.foldLeft(m2) { + case (acc, k) => + val v1 = m1(k) + val v2 = m2(k) + acc + (k -> lattice.add(v1, v2)) + }) + } + } + + def add(x: ValueSet[T], y: BitVecLiteral): ValueSet[T] = { + x match { + case VSBottom => VSBottom + case VSTop => VSTop + case VS(m) => + VS(m.map { + case (k, s) => k -> lattice.add(s, y.value, y.size) // TODO: is the size correct here? + }) + } + } + + def widen(vs1: ValueSet[T], vs2: ValueSet[T]): ValueSet[T] = { + (vs1, vs2) match { + case (VSBottom, t) => ??? + case (t, VSBottom) => ??? + case (VSTop, _) => VSTop + case (_, VSTop) => VSTop + case (VS(m1), VS(m2)) => + VS(m1.keys.foldLeft(m2) { + case (acc, k) => + val v1 = m1(k) + val v2 = m2(k) + acc + (k -> lattice.widen(v1, v2)) + }) + } + } + + def removeLowerBounds(vs: ValueSet[T]): ValueSet[T] = { + vs match { + case VSBottom => VSBottom + case VSTop => VSTop + case VS(m) => + VS(m.map { + case (k, SI(s, l, u, w)) => k -> SI(s, lattice.lowestPossibleValue, u, w) + }) + } + } + + def removeUpperBound(vs: ValueSet[T]): ValueSet[T] = { + vs match { + case VSBottom => VSBottom + case VSTop => VSTop + case VS(m) => + VS(m.map { + case (k, SI(s, l, u, w)) => k -> SI(s, l, lattice.highestPossibleValue, w) + }) + } + } + + /** + * ∗(vs, s): Returns a pair of sets (F, P). F represents the set of “fully accessed” a-locs: it + * consists of the a-locs that are of size s and whose starting addresses are in vs. 
P represents + * the set of “partially accessed” a-locs: it consists of (i) a-locs whose starting addresses are in + * vs but are not of size s, and (ii) a-locs whose addresses are in vs but whose starting addresses + * and sizes do not meet the conditions to be in F. [Reference VSA paper] + * + * @param vsR2 + * @param s size of the dereference + * @return + */ + def dereference(s: BigInt, vs: ValueSet[String], mmm: MemoryModelMap): (Set[MemoryRegion], Set[MemoryRegion]) = { + vs match { + case VSBottom => VSBottom + case VSTop => ??? //TODO: should this return everything? + case VS(m) => + for (elem <- m) { + if (elem._2 != lattice.bottom) { // region SI defined + elem._2 match { + case SI(stride, lower, upper) => + val gamma: Set[BitVecLiteral] = lattice.gamma(SI(stride, lower, upper)) + // TODO: Global memory size can be retrieved from the symbol table and are of size s + // Map addresses to exact memory locations + val fullyAccessedLocations = gamma.toList.flatMap(address => mmm.findStackFullAccessesOnly(address.value, s)) + + // Identify partially accessed locations (if any) + val partiallyAccessedLocations = gamma.toList.flatMap(address => mmm.findStackPartialAccessesOnly(address.value, s)) + + // Return the set of fully accessed locations and the set of partially accessed locations + return (fullyAccessedLocations.diff(partiallyAccessedLocations).asInstanceOf[Set[MemoryRegion]], partiallyAccessedLocations.asInstanceOf[Set[MemoryRegion]]) + case _ => ??? + } + } + } + } + (Set.empty, Set.empty) + } +} + + + +/** The powerset lattice of a set of elements of type `A` with subset ordering. + */ +class PowersetLattice[A] extends Lattice[Set[A]] { + val bottom: Set[A] = Set.empty + def lub(x: Set[A], y: Set[A]): Set[A] = x.union(y) +} + +// Single element lattice (using Option) +class SingleElementLattice[T] extends Lattice[Option[T]] { + val bottom: Option[T] = None + def lub(x: Option[T], y: Option[T]): Option[T] = (x, y) match { + case (None, None) => None + case _ => Some(x.getOrElse(y.get)) + } +} + +trait LiftedElement[+T] +case class Lift[T](el: T) extends LiftedElement[T] { + override def toString = s"Lift($el)" +} +case object LiftedBottom extends LiftedElement[Nothing] { + override def toString = "LiftBot" +} +/** + * The lift lattice for `sublattice`. + * Supports implicit lifting and unlifting. + */ +class LiftLattice[T, +L <: Lattice[T]](val sublattice: L) extends Lattice[LiftedElement[T]] { + + val bottom: LiftedElement[T] = LiftedBottom + + def lub(x: LiftedElement[T], y: LiftedElement[T]): LiftedElement[T] = + (x, y) match { + case (LiftedBottom, t) => t + case (t, LiftedBottom) => t + case (Lift(a), Lift(b)) => Lift(sublattice.lub(a, b)) + } + + /** + * Lift elements of the sublattice to this lattice. + * Note that this method is declared as implicit, so the conversion can be done automatically. + */ + def lift(x: T): LiftedElement[T] = Lift(x) + + /** + * Un-lift elements of this lattice to the sublattice. + * Throws an IllegalArgumentException if trying to unlift the bottom element + * Note that this method is declared as implicit, so the conversion can be done automatically. 
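+   * For example, unlift(Lift(Set(1))) returns Set(1), while unlift(LiftedBottom) throws
+   * an IllegalArgumentException.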
+ */ + def unlift(x: LiftedElement[T]): T = x match { + case Lift(s) => s + case LiftedBottom => throw new IllegalArgumentException("Cannot unlift bottom") + } +} + +trait TwoElement + +case object TwoElementTop extends TwoElement +case object TwoElementBottom extends TwoElement + + +/** + * A lattice with only top and bottom + */ +class TwoElementLattice extends Lattice[TwoElement]: + override val bottom: TwoElement = TwoElementBottom + override val top: TwoElement = TwoElementTop + + def lub(x: TwoElement, y: TwoElement): TwoElement = (x, y) match { + case (TwoElementBottom, TwoElementBottom) => TwoElementBottom + case _ => TwoElementTop + } + +trait FlatElement[+T] +case class FlatEl[T](el: T) extends FlatElement[T] +case object Top extends FlatElement[Nothing] +case object Bottom extends FlatElement[Nothing] + +/** The flat lattice made of element of `X`. Top is greater than every other element, and Bottom is less than every + * other element. No additional ordering is defined. + */ +class FlatLattice[X] extends Lattice[FlatElement[X]] { + + val bottom: FlatElement[X] = Bottom + + override val top: FlatElement[X] = Top + + def lub(x: FlatElement[X], y: FlatElement[X]): FlatElement[X] = (x, y) match { + case (a, Bottom) => a + case (Bottom, b) => b + case (a, b) if a == b => a + case (Top, _) => Top + case (_, Top) => Top + case _ => Top + } +} + +class TupleLattice[L1 <: Lattice[T1], L2 <: Lattice[T2], T1, T2](val lattice1: L1, val lattice2: L2) extends Lattice[(T1, T2)] { + override val bottom: (T1, T2) = (lattice1.bottom, lattice2.bottom) + + override def lub(x: (T1, T2), y: (T1, T2)): (T1, T2) = { + val (x1, x2) = x + val (y1, y2) = y + (lattice1.lub(x1, y1), lattice2.lub(x2, y2)) + } + + override def leq(x: (T1, T2), y: (T1, T2)): Boolean = { + val (x1, x2) = x + val (y1, y2) = y + lattice1.leq(x1, y1) && lattice2.leq(x2, y2) + } + + override def top: (T1, T2) = (lattice1.top, lattice2.top) +} + +//trait StridedIntervalLattice[T] extends Lattice[(T, T, T)] { +// override val bottom: (T, T, T) = (???, ???, ???) +// +// override def lub(x: (T1, T2), y: (T1, T2)): (T1, T2) = { +// val (x1, x2) = x +// val (y1, y2) = y +// (lattice1.lub(x1, y1), lattice2.lub(x2, y2)) +// } +// +// override def leq(x: (T1, T2), y: (T1, T2)): Boolean = { +// val (x1, x2) = x +// val (y1, y2) = y +// lattice1.leq(x1, y1) && lattice2.leq(x2, y2) +// } +// +// override def top: (T1, T2) = (lattice1.top, lattice2.top) +//} + +/** A lattice of maps from a set of elements of type `A` to a lattice with element `L'. Bottom is the default value. + */ +class MapLattice[A, T, +L <: Lattice[T]](val sublattice: L) extends Lattice[Map[A, T]] { + val bottom: Map[A, T] = Map().withDefaultValue(sublattice.bottom) + def lub(x: Map[A, T], y: Map[A, T]): Map[A, T] = + x.keys.foldLeft(y)((m, a) => m + (a -> sublattice.lub(x(a), y(a)))).withDefaultValue(sublattice.bottom) +} + +/** Constant propagation lattice. 
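+ * Each operation lifts the corresponding BitVectorEval function over the flat lattice:
+ * bvadd(FlatEl(x), FlatEl(y)) gives FlatEl(smt_bvadd(x, y)), Bottom on either argument gives
+ * Bottom, and otherwise Top propagates.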
+ * + */ +class ConstantPropagationLattice extends FlatLattice[BitVecLiteral] { + private def apply(op: (BitVecLiteral, BitVecLiteral) => BitVecLiteral, a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = try { + (a, b) match + case (FlatEl(x), FlatEl(y)) => FlatEl(op(x, y)) + case (Bottom, _) => Bottom + case (_, Bottom) => Bottom + case (_, Top) => Top + case (Top, _) => Top + } catch { + case e: Exception => + Logger.error(s"Failed on op $op with $a and $b") + throw e + } + + private def apply(op: BitVecLiteral => BitVecLiteral, a: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = a match + case FlatEl(x) => FlatEl(op(x)) + case Top => Top + case Bottom => Bottom + + def bv(a: BitVecLiteral): FlatElement[BitVecLiteral] = FlatEl(a) + def bvadd(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvadd, a, b) + def bvsub(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvsub, a, b) + def bvmul(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvmul, a, b) + def bvudiv(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvudiv, a, b) + def bvsdiv(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvsdiv, a, b) + def bvsrem(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvsrem, a, b) + def bvurem(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvurem, a, b) + def bvsmod(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvsmod, a, b) + def bvand(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvand, a, b) + def bvor(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvor, a, b) + def bvxor(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvxor, a, b) + def bvnand(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvnand, a, b) + def bvnor(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvnor, a, b) + def bvxnor(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvxnor, a, b) + def bvnot(a: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvnot, a) + def bvneg(a: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvneg, a) + def bvshl(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvshl, a, b) + def bvlshr(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvlshr, a, b) + def bvashr(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvashr, a, b) + def bvcomp(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = 
apply(BitVectorEval.smt_bvcomp, a, b) + def zero_extend(width: Int, a: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_zero_extend(width, _: BitVecLiteral), a) + def sign_extend(width: Int, a: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_sign_extend(width, _: BitVecLiteral), a) + def extract(high: Int, low: Int, a: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = + apply(BitVectorEval.boogie_extract(high, low, _: BitVecLiteral), a) + def concat(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_concat, a, b) +} + +/** Constant propagation lattice. + * + */ +class ConstantPropagationLatticeWithSSA extends PowersetLattice[BitVecLiteral] { + private def apply(op: (BitVecLiteral, BitVecLiteral) => BitVecLiteral, a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = + val res = for { + x <- a + y <- b + } yield op(x, y) + res + + private def apply(op: BitVecLiteral => BitVecLiteral, a: Set[BitVecLiteral]): Set[BitVecLiteral] = + val res = for { + x <- a + } yield op(x) + res + + def bv(a: BitVecLiteral): Set[BitVecLiteral] = Set(a) + def bvadd(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvadd, a, b) + def bvsub(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvsub, a, b) + def bvmul(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvmul, a, b) + def bvudiv(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvudiv, a, b) + def bvsdiv(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvsdiv, a, b) + def bvsrem(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvsrem, a, b) + def bvurem(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvurem, a, b) + def bvsmod(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvsmod, a, b) + def bvand(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvand, a, b) + def bvor(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvor, a, b) + def bvxor(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvxor, a, b) + def bvnand(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvnand, a, b) + def bvnor(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvnor, a, b) + def bvxnor(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvxnor, a, b) + def bvnot(a: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvnot, a) + def bvneg(a: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvneg, a) + def bvshl(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvshl, a, b) + def bvlshr(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvlshr, a, b) + def bvashr(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvashr, a, b) + def bvcomp(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvcomp, a, b) + def zero_extend(width: Int, a: 
Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_zero_extend(width, _: BitVecLiteral), a) + def sign_extend(width: Int, a: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_sign_extend(width, _: BitVecLiteral), a) + + def extract(high: Int, low: Int, a: Set[BitVecLiteral]): Set[BitVecLiteral] = + apply(BitVectorEval.boogie_extract(high, low, _: BitVecLiteral), a) + + def concat(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_concat, a, b) +} \ No newline at end of file diff --git a/src/main/scala/analysis/BitVectorEval.scala b/src/main/scala/analysis/BitVectorEval.scala index a3da4de13..23cd152c0 100644 --- a/src/main/scala/analysis/BitVectorEval.scala +++ b/src/main/scala/analysis/BitVectorEval.scala @@ -329,20 +329,28 @@ object BitVectorEval { } } - def smt_min(s: BitVecLiteral, t: BitVecLiteral): BitVecLiteral = { + def bitVec_min(s: BitVecLiteral, t: BitVecLiteral): BitVecLiteral = { if (smt_bvslt(s, t) == TrueLiteral) s else t } - def smt_max(s: BitVecLiteral, t: BitVecLiteral): BitVecLiteral = { + def bitVec_min(s: List[BitVecLiteral]): BitVecLiteral = { + s.reduce(bitVec_min) + } + + def bitVec_max(s: BitVecLiteral, t: BitVecLiteral): BitVecLiteral = { if (smt_bvslt(s, t) == TrueLiteral) t else s } + def bitVec_max(s: List[BitVecLiteral]): BitVecLiteral = { + s.reduce(bitVec_max) + } + @tailrec - def smt_gcd(a: BitVecLiteral, b: BitVecLiteral): BitVecLiteral = { - if (b.value == 0) a else smt_gcd(b, smt_bvsmod(a, b)) + def bitVec_gcd(a: BitVecLiteral, b: BitVecLiteral): BitVecLiteral = { + if (b.value == 0) a else bitVec_gcd(b, smt_bvsmod(a, b)) } - def smt_interval(lb: BitVecLiteral, ub: BitVecLiteral, step: BitVecLiteral): Set[BitVecLiteral] = { + def bitVec_interval(lb: BitVecLiteral, ub: BitVecLiteral, step: BitVecLiteral): Set[BitVecLiteral] = { require(smt_bvule(lb, ub) == TrueLiteral, "Lower bound must be less than or equal to upper bound") (lb.value to ub.value by step.value).map(BitVecLiteral(_, lb.size)).toSet } diff --git a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala index 93df47156..493f36e9f 100644 --- a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala +++ b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala @@ -5,14 +5,21 @@ import ir.* import util.Logger import scala.collection.mutable +trait RegisterEquality: + def variable: Variable + def assigns: Set[Assign] + /** Wrapper for variables so we can have Steensgaard-specific equals method indirectly * Relies on the SSA sets intersection being non-empty * */ -case class RegisterVariableWrapper(variable: Variable, assigns: Set[Assign]) { +case class RegisterWrapperPartialEquality(variable: Variable, assigns: Set[Assign]) extends RegisterEquality { override def equals(obj: Any): Boolean = { obj match { - case RegisterVariableWrapper(other, otherAssigns) => + case RegisterWrapperPartialEquality(other, otherAssigns) => variable == other && assigns.intersect(otherAssigns).nonEmpty + case RegisterWrapperEqualSets(other, otherAssigns) => + // treat it as Partial Equality + RegisterWrapperPartialEquality(variable, assigns) == RegisterWrapperPartialEquality(other, otherAssigns) case _ => false } @@ -22,11 +29,14 @@ case class RegisterVariableWrapper(variable: Variable, assigns: Set[Assign]) { /** Wrapper for variables so we can have ConstantPropegation-specific equals method indirectly * Relies on SSA sets being exactly the same * */ -case class 
RegisterWrapperEqualSets(variable: Variable, assigns: Set[Assign]) { +case class RegisterWrapperEqualSets(variable: Variable, assigns: Set[Assign]) extends RegisterEquality { override def equals(obj: Any): Boolean = { obj match { case RegisterWrapperEqualSets(other, otherAssigns) => variable == other && assigns == otherAssigns + case RegisterWrapperPartialEquality(other, otherAssigns) => + // treat it as Partial Equality + RegisterWrapperPartialEquality(variable, assigns) == RegisterWrapperPartialEquality(other, otherAssigns) case _ => false } @@ -39,7 +49,7 @@ case class RegisterWrapperEqualSets(variable: Variable, assigns: Set[Assign]) { class InterprocSteensgaardAnalysis( program: Program, constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], - regionAccesses: Map[CfgNode, Map[RegisterVariableWrapper, FlatElement[Expr]]], + regionAccesses: Map[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]]], mmm: MemoryModelMap, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], globalOffsets: Map[BigInt, BigInt]) extends Analysis[Any] { @@ -269,7 +279,7 @@ class InterprocSteensgaardAnalysis( // X = alloc P: [[X]] = ↑[[alloc-i]] if (directCall.target.name == "malloc") { val alloc = HeapRegion(nextMallocCount(), BitVecLiteral(BigInt(0), 0), IRWalk.procedure(cmd)) - unify(IdentifierVariable(RegisterVariableWrapper(mallocVariable, getUse(mallocVariable, cmd, reachingDefs))), PointerRef(AllocVariable(alloc))) + unify(IdentifierVariable(RegisterWrapperPartialEquality(mallocVariable, getUse(mallocVariable, cmd, reachingDefs))), PointerRef(AllocVariable(alloc))) } case assign: Assign => @@ -277,7 +287,7 @@ class InterprocSteensgaardAnalysis( case binOp: BinaryExpr => // X1 = &X2: [[X1]] = ↑[[X2]] exprToRegion(binOp, cmd).foreach( - x => unify(IdentifierVariable(RegisterVariableWrapper(assign.lhs, getDefinition(assign.lhs, cmd, reachingDefs))), PointerRef(AllocVariable(x))) + x => unify(IdentifierVariable(RegisterWrapperPartialEquality(localAssign.lhs, getDefinition(assign.lhs, cmd, reachingDefs))), PointerRef(AllocVariable(x))) ) // TODO: should lookout for global base + offset case as well case _ => @@ -290,7 +300,7 @@ class InterprocSteensgaardAnalysis( X2_star.foreach( x => unify(ExpressionVariable(x), PointerRef(alpha)) ) - unify(alpha, IdentifierVariable(RegisterVariableWrapper(X1, getDefinition(X1, cmd, reachingDefs)))) + unify(alpha, IdentifierVariable(RegisterWrapperPartialEquality(X1, getDefinition(X1, cmd, reachingDefs)))) Logger.debug("Memory load: " + memoryLoad) Logger.debug("Index: " + memoryLoad.index) @@ -302,13 +312,13 @@ class InterprocSteensgaardAnalysis( // X1 = &X: [[X1]] = ↑[[X2]] (but for globals) val $X2 = exprToRegion(memoryLoad.index, cmd) $X2.foreach( - x => unify(IdentifierVariable(RegisterVariableWrapper(assign.lhs, getDefinition(assign.lhs, cmd, reachingDefs))), PointerRef(AllocVariable(x))) + x => unify(IdentifierVariable(RegisterWrapperPartialEquality(assign.lhs, getDefinition(assign.lhs, cmd, reachingDefs))), PointerRef(AllocVariable(x))) ) case variable: Variable => // X1 = X2: [[X1]] = [[X2]] val X1 = assign.lhs val X2 = variable - unify(IdentifierVariable(RegisterVariableWrapper(X1, getDefinition(X1, cmd, reachingDefs))), IdentifierVariable(RegisterVariableWrapper(X2, getUse(X2, cmd, reachingDefs)))) + unify(IdentifierVariable(RegisterWrapperPartialEquality(X1, getDefinition(X1, cmd, reachingDefs))), IdentifierVariable(RegisterWrapperPartialEquality(X2, getUse(X2, cmd, reachingDefs)))) case 
_ => // do nothing } } @@ -349,16 +359,16 @@ class InterprocSteensgaardAnalysis( /** @inheritdoc */ - def pointsTo(): Map[RegisterVariableWrapper, Set[RegisterVariableWrapper | MemoryRegion]] = { + def pointsTo(): Map[RegisterWrapperPartialEquality, Set[RegisterWrapperPartialEquality | MemoryRegion]] = { val solution = solver.solution() val unifications = solver.unifications() Logger.debug(s"Solution: \n${solution.mkString(",\n")}\n") Logger.debug(s"Sets: \n${unifications.values.map { s => s"{ ${s.mkString(",")} }"}.mkString(", ")}") val vars = solution.keys.collect { case id: IdentifierVariable => id } - val emptyMap = Map[RegisterVariableWrapper, Set[RegisterVariableWrapper | MemoryRegion]]() + val emptyMap = Map[RegisterWrapperPartialEquality, Set[RegisterWrapperPartialEquality | MemoryRegion]]() val pointsto = vars.foldLeft(emptyMap) { (a, v: IdentifierVariable) => - val pt: Set[RegisterVariableWrapper | MemoryRegion] = unifications(solution(v)).collect { + val pt: Set[RegisterWrapperPartialEquality | MemoryRegion] = unifications(solution(v)).collect { case PointerRef(IdentifierVariable(id)) => id case PointerRef(AllocVariable(alloc)) => alloc }.toSet @@ -370,9 +380,9 @@ class InterprocSteensgaardAnalysis( /** @inheritdoc */ - def mayAlias(): (RegisterVariableWrapper, RegisterVariableWrapper) => Boolean = { + def mayAlias(): (RegisterWrapperPartialEquality, RegisterWrapperPartialEquality) => Boolean = { val solution = solver.solution() - (id1: RegisterVariableWrapper, id2: RegisterVariableWrapper) => + (id1: RegisterWrapperPartialEquality, id2: RegisterWrapperPartialEquality) => val sol1 = solution(IdentifierVariable(id1)) val sol2 = solution(IdentifierVariable(id2)) sol1 == sol2 && sol1.isInstanceOf[PointerRef] // same equivalence class, and it contains a reference @@ -392,7 +402,7 @@ case class AllocVariable(alloc: MemoryRegion) extends StTerm with Var[StTerm] { /** A term variable that represents an identifier in the program. 
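 * For example, mayAlias (above) treats two identifiers as aliasing exactly when their
 * IdentifierVariable terms fall into the same unification class and that class contains a PointerRef.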
*/ -case class IdentifierVariable(id: RegisterVariableWrapper) extends StTerm with Var[StTerm] { +case class IdentifierVariable(id: RegisterWrapperPartialEquality) extends StTerm with Var[StTerm] { override def toString: String = s"$id" } diff --git a/src/main/scala/analysis/LAST_VSA_BACKUP.scala b/src/main/scala/analysis/LAST_VSA_BACKUP.scala new file mode 100644 index 000000000..92bff84b7 --- /dev/null +++ b/src/main/scala/analysis/LAST_VSA_BACKUP.scala @@ -0,0 +1,276 @@ +//package analysis +//import ir.* +//import util.* +// +//import scala.collection.mutable +//import analysis.BitVectorEval.* +//import analysis.* +// +//class ActualVSA(program: Program, +// constantPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], +// reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], +// mmm: MemoryModelMap) { +// +// enum Flag { +// case CF // Carry Flag +// case ZF // Zero Flag +// case SF // Sign Flag +// case PF // Parity Flag +// case AF // Auxiliary Flag +// case OF // Overflow Flag +// } +// +// enum Bool3 { +// case True +// case False +// case Maybe +// } +// +// // TODO: This assumes no function is called Data or Heap (should be a tuple instead) +// val DATA_REGION_NAME = "Data" +// val HEAP_REGION_NAME = "Heap" +// +// val lattice: ValueSetLattice = ValueSetLattice() +// +// type MemRgn = String // all record titles +// +// val MEMORY_REGIONS: List[MemRgn] = (Set(DATA_REGION_NAME, HEAP_REGION_NAME) ++ mmm.getAllocsPerProcedure.keySet).toList.sorted +// val ALLOCS: Map[String, Set[MemoryRegion]] = mmm.getAllocsPerProcedure.asInstanceOf[Map[String, Set[MemoryRegion]]] ++ Map("Data" -> mmm.getAllDataRegions.asInstanceOf[Set[MemoryRegion]], "Heap" -> mmm.getAllHeapRegions.asInstanceOf[Set[MemoryRegion]]) +// val AllocEnv: AlocEnv = AlocEnv() +// +// // /** +// // * ∗(vs, s): Returns a pair of sets (F, P). F represents the set of “fully accessed” a-locs: it +// // * consists of the a-locs that are of size s and whose starting addresses are in vs. P represents +// // * the set of “partially accessed” a-locs: it consists of (i) a-locs whose starting addresses are in +// // * vs but are not of size s, and (ii) a-locs whose addresses are in vs but whose starting addresses +// // * and sizes do not meet the conditions to be in F. 
[Reference VSA paper] +// // * +// // * @param vsR2 +// // * @param s size of the dereference +// // * @return +// // */ +// // def dereference(s: BigInt): (Set[MemoryRegion], Set[MemoryRegion]) = { +// // // TODO: Global memory size can be retrieved from the symbol table and are of size s +// // // Map addresses to exact memory locations +// // val fullyAccessedLocations = stridedInterval.gamma.flatMap(address => mmm.findStackFullAccessesOnly(address.value, s)) +// // +// // // Identify partially accessed locations (if any) +// // val partiallyAccessedLocations = stridedInterval.gamma.flatMap(address => mmm.findStackPartialAccessesOnly(address.value, s)) +// // +// // // Return the set of fully accessed locations and the set of partially accessed locations +// // (fullyAccessedLocations.diff(partiallyAccessedLocations).asInstanceOf[Set[MemoryRegion]], partiallyAccessedLocations.asInstanceOf[Set[MemoryRegion]]) +// // } +// // } +// +// /** +// * Allocs Structure +// * Procedures +// * main -> {alloc1, alloc2, alloc3} +// * foo -> {alloc4, alloc5} +// * Data +// * Data -> {alloc6, alloc7} +// * Heap +// * Heap -> {alloc8, alloc9} +// */ +// case class AlocEnv() { +// private val envs: mutable.Map[MemRgn, StridedInterval[BitVecLiteral]] = preCalculate() +// private val valueSets: mutable.Map[MemRgn, ValueSet[String]] = mutable.Map[MemRgn, ValueSet[String]]() +// +// def preCalculate(): mutable.Map[MemRgn, StridedInterval[BitVecLiteral]] = { +// val res = mutable.Map[MemRgn, StridedInterval[BitVecLiteral]]() +// MEMORY_REGIONS.foreach(r => { +// res.put(r, getSrtidedIntervals(r)) +// }) +// res +// } +// +// private def getSrtidedIntervals(r: MemRgn): StridedInterval[BitVecLiteral] = { +// // if stack or data we have offset. Otherwise we mark it as bottom VS +// if (r == DATA_REGION_NAME) { +// val allocsThatBelong = ALLOCS(r).asInstanceOf[Set[DataRegion]] +// lattice.lattice.valuesToSI(allocsThatBelong.map(a => a.start).toList) +// } else if (r == HEAP_REGION_NAME) { +// lattice.lattice.bottom +// } else { +// val allocsThatBelong = ALLOCS(r).asInstanceOf[Set[StackRegion]] +// lattice.lattice.valuesToSI(allocsThatBelong.map(a => a.start).toList) +// } +// } +// +// def getVS(r: MemRgn): ValueSet[String] = { +// if (valueSets.contains(r)) { +// valueSets(r) +// } else { +// // map everything that is not r to bottom +// val cpy = envs.clone() +// cpy.keys.foreach(k => if k != r then cpy(k) = lattice.lattice.bottom) +// valueSets.put(r, VS(cpy.toMap)) +// VS(cpy.toMap) +// } +// } +// } +// +// case class AbsEnv(): +// var regEnv: mutable.Map[Variable, VS[String]] = mutable.Map[Variable, VS[String]]().withDefault(_ => lattice.bottom) +// var flagEnv: mutable.Map[Flag, Bool3] = mutable.Map[Flag, Bool3]().withDefault(_ => Bool3.Maybe) +// var alocEnv: AlocEnv = AlocEnv() +// +// def join(absEnv: AbsEnv): AbsEnv = { +// val out = AbsEnv() +// out.regEnv = regEnv.clone() +// out.flagEnv = flagEnv.clone() +// out.alocEnv = alocEnv +// absEnv.regEnv.foreach { case (k, v) => +// out.regEnv(k) = lattice.lub(regEnv(k), v) +// } +// absEnv.flagEnv.foreach { case (k, v) => +// out.flagEnv(k) = ??? 
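+//       // (presumably a pointwise Bool3 join is intended here; a sketch under that assumption:
+//       //  out.flagEnv(k) = if (flagEnv(k) == v) v else Bool3.Maybe)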
+// } +// out +// } +// +// override def toString: String = { +// val env1Str = regEnv.map { case (k, v) => s"$k -> $v" }.mkString("\n\n") +// val env2Str = flagEnv.map { case (k, v) => s"$k -> $v" }.mkString("\n\n") +// val env3Str = alocEnv.toString +// s"Env1:\n\n$env1Str\n\nEnv2:\n\n$env2Str\n\nEnv3:\n\n$env3Str" +// } +// +// def AbstractTransformer(in: AbsEnv, instruction: CFGPosition): AbsEnv = { +// instruction match { +// case p: Procedure => in +// case b: Block => in +// case c: Command => +// c match +// case statement: Statement => +// statement match +// case localAssign: LocalAssign => +// localAssign.rhs match +// case binOp: BinaryExpr => +// if (binOp.arg1.isInstanceOf[Variable]) { +// val R1 = localAssign.lhs +// val R2 = binOp.arg1.asInstanceOf[Variable] +// val c = evaluateExpression(binOp.arg2, constantPropResult(instruction)) +// if (c.isDefined) { +// +// // R1 = R2 + c +// val out = in +// val vs_R2: ValueSet[String] = in.regEnv.get(R2) +// out.regEnv(R1) = lattice.add(vs_R2, c.get) +// return out +// } +// } +// in +// case memoryLoad: MemoryLoad => +// memoryLoad.index match +// case binOp: BinaryExpr => +// if (binOp.arg2.isInstanceOf[Variable]) { +// val R1 = localAssign.lhs +// val R2 = binOp.arg1.asInstanceOf[Variable] // TODO: Is R2 always a variable? +// val out = in +// getDefinition(binOp.arg2.asInstanceOf[Variable], instruction, reachingDefs).foreach { +// d => +// d.rhs match +// case binOp2: BinaryExpr => +// val c1 = evaluateExpression(binOp2.arg1, constantPropResult(instruction)) +// val c2 = evaluateExpression(binOp2.arg2, constantPropResult(instruction)) +// // R1 = *(R2 + c1) + c2 +// val vs_R2: ValueSet[String] = in.regEnv(R2) +// val s = memoryLoad.size // s is the size of dereference performed by the instruction +// val (f: Set[MemoryRegion], p: Set[MemoryRegion]) = lattice.dereference(BigInt(s), vs_R2, mmm) +// println("VSA") +// println(f) +// if (p.isEmpty) { +// val vs_rhs = f.map(r => in.regEnv(r).getAAlloc(r).valueSet).fold(lattice.bottom)(_ join _) +// out.env1(R1) = lattice.add(vs_rhs, c2.get) +// } else { +// out.env1(R1) = lattice.top +// } +// case _ => +// } +// out +// } else { +// in +// } +// case _ => in // TODO: Handle other cases +// case variable: Variable => +// ??? +// // val R1 = localAssign.lhs +// // val R2 = variable +// // // R1 >= R2 +// // val out = in +// // val vs_R1 = in.env1.getOrElseUpdate(R1, ValueSetLattice.BOTTOM) +// // val vs_R2 = in.env1(R2) +// // val vs_lb = vs_R2.removeUpperBounds() +// // val vs_ub = vs_R1.removeLowerBounds() +// // out.env1(R1) = vs_R1.meet(vs_lb) +// // out.env1(R2) = vs_R2.meet(vs_ub) +// // out +// case bitVecLiteral: BitVecLiteral => +// ??? +// // val R1 = localAssign.lhs +// // val c = bitVecLiteral +// // // R1 <= c +// // // from 0 to c, all value sets are possible (ie. stack, global) TODO: this may be wrong because of the _ join _? 
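+// //               // (worked sketch of the R1 = R2 + c transfer handled further up, with hypothetical values:
+// //               //  if VS(R2) = {stack -> 4[0, 12]} and c = 8, lattice.add shifts the interval so that
+// //               //  VS(R1) = {stack -> 4[8, 20]})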
+// // val interval = bitVec_interval(BitVecLiteral(0, c.size), c, BitVecLiteral(1, c.size)) +// // val regions: mutable.Set[MemoryRegion] = mutable.Set() +// // println(c) +// // interval.foreach(v => +// // val dataObject = mmm.findDataObject(v.value) +// // if dataObject.isDefined then regions.add(dataObject.get) +// // ) +// // TOP_STRIDE.gamma.map(v => regions.add(mmm.findStackObject(v.value).get)) +// // +// // val allValueSets: mutable.Set[ValueSet] = mutable.Set() +// // regions.foreach(r => allValueSets.add(in.env2(r).getAAlloc(r).valueSet)) +// // val vs_c = allValueSets.fold(ValueSetLattice.BOTTOM)(_ join _) +// // val out = in +// // out.env1(R1) = in.env1(R1).meet(vs_c) +// // out +// +// // val vs_c = ValueSet(Set(StridedInterval(smt_gcd(BitVecLiteral(BigInt(0), c.size), c), BitVecLiteral(BigInt(0), c.size), c))) // TODO: Fix ME +// // val out = in +// // out.env1(R1) = in.env1(R1).meet(vs_c) +// // out +// case _ => in // TODO: Handle other cases +// case memoryAssign: MemoryAssign => in // TODO: *(R1 + c1) = R2 + c2 +// case nop: NOP => in +// case assert: Assert => in +// case assume: Assume => in +// case jump: Jump => in +// } +// } +// +// def IntraProceduralVSA(): mutable.Map[CFGPosition, AbsEnv] = { +// val worklist = new mutable.Queue[CFGPosition]() +// worklist.enqueue(program.mainProcedure) +// +// val absEnv_enter = AbsEnv() +// val abstractStates = mutable.Map[CFGPosition, AbsEnv](worklist.head -> absEnv_enter) +// while(worklist.nonEmpty) { +// val n: CFGPosition = worklist.dequeue() +// val m = IntraProcIRCursor.succ(n) +// for (succ <- m) { +// mmm.popContext() +// mmm.pushContext(IRWalk.procedure(n).name) +// val edge_amc = AbstractTransformer(abstractStates(n), succ) +// Propagate(succ, edge_amc) +// } +// } +// +// def Propagate(n: CFGPosition, edge_amc: AbsEnv): Unit = { +// if (!abstractStates.contains(n)) { +// abstractStates(n) = edge_amc +// worklist.enqueue(n) +// } else { +// val oldEnv = abstractStates(n) +// val newEnv = oldEnv.join(edge_amc) +// if (newEnv != oldEnv) { +// abstractStates(n) = newEnv +// worklist.enqueue(n) +// } +// } +// } +// abstractStates +// } +//} diff --git a/src/main/scala/analysis/Lattice.scala b/src/main/scala/analysis/Lattice.scala index 5c3ccd630..34dc8f43a 100644 --- a/src/main/scala/analysis/Lattice.scala +++ b/src/main/scala/analysis/Lattice.scala @@ -1,8 +1,9 @@ package analysis import ir._ -import analysis.BitVectorEval +import analysis.BitVectorEval._ import util.Logger +import math.pow /** Basic lattice */ @@ -25,6 +26,794 @@ trait Lattice[T]: */ def leq(x: T, y: T): Boolean = lub(x, y) == y // rarely used, but easy to implement :-) +//trait StridedInterval[+T] +// +//case class SI[T](s: T, l: T, u: T) extends StridedInterval[T] { +// override def toString = s"SI $s [$l, $u]" +//} +// +//case object SIBottom extends StridedInterval[BitVecLiteral] { +// override def toString = "SIBot" +//} + +///** +// * SI class that represents a strided interval +// * s is the stride +// * l is the lower bound +// * u is the upper bound +// * [l, u] is the interval +// * [l, u] \ s is the set of values +// * 0[l,l] represents the singleton set {l} +// */ +//class StridedIntervalLattice extends Lattice[StridedInterval[BitVecLiteral]] { +// val lowestPossibleValue: BitVecLiteral = BitVecLiteral(0, 64) +// val highestPossibleValue: BitVecLiteral = BitVecLiteral(Long.MaxValue - 1, 64) +// +// override val bottom: StridedInterval[BitVecLiteral] = SIBottom +// override def top: StridedInterval[BitVecLiteral] = SI(BitVecLiteral(1, 
64), lowestPossibleValue, highestPossibleValue) +// +// def gamma(x: StridedInterval[BitVecLiteral]): Set[BitVecLiteral] = x match { +// case SIBottom => Set.empty +// case SI(s, l, u) => +// bitVec_interval(l, u, s) +// } +// +// /** S1[L1, U1] join S2[L2, U2] -> gcd(S1, S2)[min(L1, L2), max(U1, U2)] */ +// override def lub(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// (x, y) match { +// case (SIBottom, t) => t +// case (t, SIBottom) => t +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// SI(bitVec_gcd(s1, s2), bitVec_min(l1, l2), bitVec_max(u1, u2)) +// } +// } +// +// /** S1[L1, U1] meet S2[L2, U2] -> gcd(S1, S2)[max(L1, L2), min(U1, U2)] */ +// def meet(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// (x, y) match { +// case (SIBottom, t) => SIBottom +// case (t, SIBottom) => SIBottom +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// SI(bitVec_gcd(s1, s2), bitVec_max(l1, l2), bitVec_min(u1, u2)) +// } +// } +// +// /** Addition +// * Addition defined in page 6 Figure 2 of: https://dl.acm.org/doi/pdf/10.1145/1111542.1111560 +// * */ +// def add(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// (x, y) match { +// case (SIBottom, t) => t +// case (t, SIBottom) => t +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// val lbound = smt_bvadd(l1, l2) +// val ubound = smt_bvadd(u1, u2) +// val s = bitVec_gcd(s1, s2) +// if (smt_bvsle(ubound, highestPossibleValue) == TrueLiteral && smt_bvsge(lbound, lowestPossibleValue) == TrueLiteral) { +// SI(s, lbound, ubound) +// } else { +// throw new IllegalArgumentException(s"Addition overflow: $lbound, $ubound") +// } +// } +// } +// +// /** Unary Minus */ +// def unaryMinus(x: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// x match { +// case SIBottom => SIBottom +// case SI(s, l, u) => +// if (smt_bvcomp(l, u) == BitVecLiteral(1, 1) && (smt_bvcomp(l, lowestPossibleValue) == BitVecLiteral(1, 1) && smt_bvcomp(u, lowestPossibleValue) == BitVecLiteral(1, 1))) { +// SI(BitVecLiteral(0, 64), lowestPossibleValue, lowestPossibleValue) +// } else if (smt_bvcomp(l, lowestPossibleValue) == BitVecLiteral(0, 1)) { +// SI(s, smt_bvneg(u), smt_bvneg(l)) +// } +// else { +// SI(BitVecLiteral(1, 64), lowestPossibleValue, highestPossibleValue) +// } +// } +// } +// +// /** Substraction */ +// def sub(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// add(x, unaryMinus(y)) +// } +// +// /** Widen */ +// def widen(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// /* formula for widening: +// given: s1[lb1, ub1] and s2[lb2, ub2] +// then: gcd(s1, s2)[min(lb1, lb2), max(ub1, ub2)] +// where: min(lb1, lb2) = lb1 if lb1 <= lb2 +// and: min(lb1, lb2) = minPossibleValue otherwise +// where: max(ub1, ub2) = ub1 if ub1 >= ub2 +// and: max(ub1, ub2) = maxPossibleValue otherwise +// +// assuming: +// minPossibleValue = lowestPossibleValue +// maxPossibleValue = highestPossibleValue + (lb - 1) mod s +// */ +// (x, y) match { +// case (SIBottom, t) => ??? +// case (t, SIBottom) => ??? 
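+//       // hypothetical example following the formula above: widen(4[0, 8], 4[4, 16]) keeps the
+//       // lower bound 0 (since 0 <= 4) but gives up the upper bound (8 < 16) to the adjusted maximum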
+// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// val s = bitVec_gcd(s1, s2) +// val l = if (smt_bvule(l1, l2) == TrueLiteral) l1 else lowestPossibleValue +// val u = if (smt_bvuge(u1, u2) == TrueLiteral) u1 else smt_bvsmod(smt_bvadd(highestPossibleValue, smt_bvsub(l1, BitVecLiteral(1, 64))), s) +// SI(s, l, u) +// } +// } +// +// /** +// * Calculating strided interval for a list of values using accumulative gcd. +// * @param x the list of values +// * @return the strided interval representing the values in the list +// */ +// def valuesToSI(x: List[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// if (x.isEmpty) { +// SIBottom +// } else { +// val l = bitVec_min(x) +// val u = bitVec_max(x) +// val initialStride = smt_bvsub(u, l) +// val stride = x.foldLeft(initialStride) { +// case (acc, v) => bitVec_gcd(smt_bvsub(v, l), acc) +// } +// SI(stride, l, u) +// } +// } +//} + + +trait StridedWrappedInterval + +case class SI(s: BigInt, l: BigInt, u: BigInt, w: BigInt) extends StridedWrappedInterval { + if (l == u) { + require(s == 0) + } + override def toString = s"SASI $s [$l, $u] $w" +} + +case object SIBottom extends StridedWrappedInterval { + override def toString = "SASIBot" +} + +// TOP is 1[0^w, 1^w]w +case object SITop extends StridedWrappedInterval { + override def toString = "SASITop" +} + +class SASILattice extends Lattice[StridedWrappedInterval] { + val lowestPossibleValue: BigInt = 0 + val highestPossibleValue: BigInt = Long.MaxValue - 1 + + override val bottom: StridedWrappedInterval = SIBottom + + override def top: StridedWrappedInterval = SITop + +// def gamma(x: StridedWrappedInterval): Set[BitVecLiteral] = x match { +// case SIBottom => Set.empty +// case SI(s, l, u, w) => +// if (s == BitVecLiteral(0, 64)) { // singleton set +// Set(l) +// } else { +// bitVec_interval(l, u, s) +// } +// } + + def isSingleValue(x: StridedWrappedInterval): Boolean = x match { + case SI(s, l, u, w) => s == 0 && l == u + case _ => false + } + + def modularPlus(a: BigInt, b: BigInt, w: BigInt): BigInt = { + (a + b) mod BigInt(2).pow(w.toInt) + } + + def modularMinus(a: BigInt, b: BigInt, w: BigInt): BigInt = { + (a - b) mod BigInt(2).pow(w.toInt) + } + + def modularLEQ(a: BigInt, b: BigInt, x: BigInt, w: BigInt): Boolean = { + modularMinus(a, x, w) <= modularMinus(b, x, w) + } + + def membershipFunction(v: BigInt, r: StridedWrappedInterval): Boolean = { + r match { + case SIBottom => false + case SITop => true + case SI(sr, lb, ub, w) => + modularLEQ(v, ub, lb, w) && (modularMinus(v, lb, w) mod sr) == 0 + } + } + + def cardinalityFunction(r: StridedWrappedInterval, w: BigInt): BigInt = { + r match { + case SIBottom => 0 + case SITop => BigInt(2).pow(w.toInt) + case SI(sr, lb, ub, w) => ((ub - lb + 1) / sr) // TODO: this may need to be a math.floor operation + } + } + + def orderingOperator(r: StridedWrappedInterval, t: StridedWrappedInterval): Boolean = { + if (r == SITop && t != SITop) { + false + } else if (r == SIBottom || t == SITop) { + true + } else { + (r, t) match { + case (SI(sr, a, b, w1), SI(st, c, d, w2)) => + if ((a == c) && (b == d) && ((sr mod st) == 0)) { + return true + } + membershipFunction(a, t) && membershipFunction(b, t) && (!membershipFunction(c, r) || !membershipFunction(d, r)) && ((a - c) mod st) == 0 && (sr mod st) == 0 + case _ => false + } + } + } + + /** S1[L1, U1] join S2[L2, U2] -> gcd(S1, S2)[min(L1, L2), max(U1, U2)] */ + override def lub(r: StridedWrappedInterval, t: StridedWrappedInterval): StridedWrappedInterval = { +// (s, t) match { +// case (SIBottom, 
t) => t +// case (t, SIBottom) => t +// case (SI(a, b, u1, w1), SI(s2, c, d, w2)) => +// var u: BigInt = 0 +// var l: BigInt = 0 +// if (isSingleValue(s) && isSingleValue(t)) { +// val si1_card = WCardMod() +// val si2_card = WCardMod() +// if (si1_card <= si2_card) { +// l = a +// u = d +// } else { +// l = c +// u = b +// } +// +// SI(u - l, l, u, ) +// } +// } + + (r, t) match { + case (SI(sr, a, b, w1), SI(st, c, d, w2)) => + assert(w1 == w2) + val w = w1 // TODO: should this be the largest? + if (orderingOperator(r, t)) { + return t + } + if (orderingOperator(t, r)) { + return r + } + if (membershipFunction(a, t) && membershipFunction(b, t) && membershipFunction(c, r) && membershipFunction(d, r)) { + return SITop + } + if (membershipFunction(c, r) && membershipFunction(b, t) && !membershipFunction(a, t) && !membershipFunction(d, r)) { + return SI(sr.gcd(st).gcd(modularMinus(d, a, w)), a, d, w) + } + if (membershipFunction(a, t) && membershipFunction(d, r) && !membershipFunction(c, r) && !membershipFunction(b, t)) { + return SI(sr.gcd(st).gcd(modularMinus(b, c, w)), c, b, w) + } + val sad = SI(sr.gcd(st).gcd(modularMinus(d, a, w)), a, d, w) + val scb = SI(sr.gcd(st).gcd(modularMinus(b, c, w)), c, b, w) + if (!membershipFunction(a, t) && !membershipFunction(d, r) && !membershipFunction(c, r) && !membershipFunction(b, t) && cardinalityFunction(sad, w) <= cardinalityFunction(scb, w)) { + return sad + } + return scb + case _ => ??? + } + } + + def singletonSI(v: BigInt, w: BigInt): StridedWrappedInterval = { + SI(0, v, v, w) + } + + /** + * s + t = + * BOT if s = BOT or t = BOT + * gcd(s, t)(|a +w c, b +w d|) if s = (|a, b|), t = (|c, d|) and #s + #t <= 2^w + * @param s + * @param t + * @return + */ + def add(s: StridedWrappedInterval, t: StridedWrappedInterval): StridedWrappedInterval = { + (s, t) match { + case (SIBottom, _) => SIBottom // TODO: is this correct? + case (_, SIBottom) => SIBottom // TODO: is this correct? + case (SI(ss, a, b, w1), SI(st, c, d, w2)) if (cardinalityFunction(s, w1) + cardinalityFunction(t, w2)) <= BigInt(2).pow(w1.toInt) => + assert(w1 == w2) + return SI(ss.gcd(st), modularPlus(a, c, w1), modularPlus(b, d, w1), w1) + case _ => SITop + } + } + + def add(s: StridedWrappedInterval, t: BigInt, w: BigInt): StridedWrappedInterval = { + (s, t) match { + case (SIBottom, _) => SIBottom // TODO: is this correct? + case (SI(ss, a, b, w1), t) => + return add(s, singletonSI(t, w)) + case _ => SITop + } + } + + def sub(s: StridedWrappedInterval, t: StridedWrappedInterval): StridedWrappedInterval = { + (s, t) match { + case (SIBottom, _) => SIBottom // TODO: is this correct? + case (_, SIBottom) => SIBottom // TODO: is this correct? + case (SI(ss, a, b, w1), SI(st, c, d, w2)) if (cardinalityFunction(s, w1) + cardinalityFunction(t, w2)) <= BigInt(2).pow(w1.toInt) => + assert(w1 == w2) + return SI(ss.gcd(st), modularMinus(a, d, w1), modularMinus(b, c, w1), w1) + case _ => SITop + } + } + + def sub(s: StridedWrappedInterval, t: BigInt, w: BigInt): StridedWrappedInterval = { + (s, t) match { + case (SIBottom, _) => SIBottom // TODO: is this correct? 
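+      // worked sketch of the two-interval sub above (hypothetical values): sub(SI(4, 8, 16, 8), SI(2, 1, 5, 8))
+      // passes the cardinality guard and yields SI(gcd(4, 2), 8 -w 5, 16 -w 1, 8) = SI(2, 3, 15, 8)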
+ case (SI(ss, a, b, w1), t) => + return sub(s, singletonSI(t, w)) + case _ => SITop + } + } + + +// /** S1[L1, U1] meet S2[L2, U2] -> gcd(S1, S2)[max(L1, L2), min(U1, U2)] */ +// def meet(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// (x, y) match { +// case (SIBottom, t) => SIBottom +// case (t, SIBottom) => SIBottom +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// SI(bitVec_gcd(s1, s2), bitVec_max(l1, l2), bitVec_min(u1, u2)) +// } +// } +// +// /** Addition +// * Addition defined in page 6 Figure 2 of: https://dl.acm.org/doi/pdf/10.1145/1111542.1111560 +// * */ +// def add(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// (x, y) match { +// case (SIBottom, t) => t +// case (t, SIBottom) => t +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// val lbound = smt_bvadd(l1, l2) +// val ubound = smt_bvadd(u1, u2) +// val s = bitVec_gcd(s1, s2) +// if (smt_bvsle(ubound, highestPossibleValue) == TrueLiteral && smt_bvsge(lbound, lowestPossibleValue) == TrueLiteral) { +// SI(s, lbound, ubound) +// } else { +// throw new IllegalArgumentException(s"Addition overflow: $lbound, $ubound") +// } +// } +// } +// +// /** Unary Minus */ +// def unaryMinus(x: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// x match { +// case SIBottom => SIBottom +// case SI(s, l, u) => +// if (smt_bvcomp(l, u) == BitVecLiteral(1, 1) && (smt_bvcomp(l, lowestPossibleValue) == BitVecLiteral(1, 1) && smt_bvcomp(u, lowestPossibleValue) == BitVecLiteral(1, 1))) { +// SI(BitVecLiteral(0, 64), lowestPossibleValue, lowestPossibleValue) +// } else if (smt_bvcomp(l, lowestPossibleValue) == BitVecLiteral(0, 1)) { +// SI(s, smt_bvneg(u), smt_bvneg(l)) +// } +// else { +// SI(BitVecLiteral(1, 64), lowestPossibleValue, highestPossibleValue) +// } +// } +// } +// +// /** Substraction */ +// def sub(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// add(x, unaryMinus(y)) +// } +// +// /** Widen */ +// def widen(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// /* formula for widening: +// given: s1[lb1, ub1] and s2[lb2, ub2] +// then: gcd(s1, s2)[min(lb1, lb2), max(ub1, ub2)] +// where: min(lb1, lb2) = lb1 if lb1 <= lb2 +// and: min(lb1, lb2) = minPossibleValue otherwise +// where: max(ub1, ub2) = ub1 if ub1 >= ub2 +// and: max(ub1, ub2) = maxPossibleValue otherwise +// +// assuming: +// minPossibleValue = lowestPossibleValue +// maxPossibleValue = highestPossibleValue + (lb - 1) mod s +// */ +// (x, y) match { +// case (SIBottom, t) => ??? +// case (t, SIBottom) => ??? +// case (SI(s1, l1, u1), SI(s2, l2, u2)) => +// val s = bitVec_gcd(s1, s2) +// val l = if (smt_bvule(l1, l2) == TrueLiteral) l1 else lowestPossibleValue +// val u = if (smt_bvuge(u1, u2) == TrueLiteral) u1 else smt_bvsmod(smt_bvadd(highestPossibleValue, smt_bvsub(l1, BitVecLiteral(1, 64))), s) +// SI(s, l, u) +// } +// } +// +// /** +// * Calculating strided interval for a list of values using accumulative gcd. 
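+// * For example, the values {4, 8, 16} give l = 4, u = 16, an initial stride of 16 - 4 = 12, and an
+// * accumulated stride gcd(0, 12) -> gcd(4, 12) -> gcd(12, 4) = 4, i.e. the strided interval 4[4, 16].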
+// * +// * @param x the list of values +// * @return the strided interval representing the values in the list +// */ +// def valuesToSI(x: List[BitVecLiteral]): StridedInterval[BitVecLiteral] = { +// if (x.isEmpty) { +// SIBottom +// } else { +// val l = bitVec_min(x) +// val u = bitVec_max(x) +// val initialStride = smt_bvsub(u, l) +// val stride = x.foldLeft(initialStride) { +// case (acc, v) => bitVec_gcd(smt_bvsub(v, l), acc) +// } +// SI(stride, l, u) +// } +// } +} + +sealed trait ValueSet[T] + +case class VS[T](m: Map[T, StridedWrappedInterval]) extends ValueSet[T] { + override def toString: String = m.toString +} + +/** The lattice of integers with the standard ordering. + */ +class ValueSetLattice[T] extends Lattice[ValueSet[T]] { + + case object VSBottom extends ValueSet[T] { + override def toString = "VSBot" + } + + case object VSTop extends ValueSet[T] { + override def toString = "VSTop" + } + + override val bottom: ValueSet[T] = VSBottom + override def top: ValueSet[T] = VSTop + + val lattice: SASILattice = SASILattice() + + override def lub(x: ValueSet[T], y: ValueSet[T]): ValueSet[T] = { + (x, y) match { + case (VSBottom, t) => t + case (t, VSBottom) => t + case (VSTop, _) => VSTop + case (_, VSTop) => VSTop + case (VS(m1), VS(m2)) => + VS(m1.keys.foldLeft(m2) { + case (acc, k) => + val v1 = m1(k) + val v2 = m2(k) + acc + (k -> lattice.lub(v1, v2)) + }) + } + } + +// def meet(x: ValueSet[String], y: ValueSet[String]): ValueSet[String] = { +// (x, y) match { +// case (VSBottom, t) => VSBottom +// case (t, VSBottom) => VSBottom +// case (VSTop, _) => y +// case (_, VSTop) => x +// case (VS(m1), VS(m2)) => +// VS(m1.keys.foldLeft(m2) { +// case (acc, k) => +// val v1 = m1(k) +// val v2 = m2(k) +// acc + (k -> lattice.meet(v1, v2)) +// }) +// } +// } + + def applyOp(op: BinOp, lhs: ValueSet[T], rhs: Either[ValueSet[T], BitVecLiteral]): ValueSet[T] = { + op match + case bvOp: BVBinOp => + bvOp match + case BVAND => ??? + case BVOR => ??? + case BVADD => rhs match + case Left(vs) => add(lhs, vs) + case Right(bitVecLiteral) => add(lhs, bitVecLiteral) + case BVMUL => ??? + case BVUDIV => ??? + case BVUREM => ??? + case BVSHL => ??? + case BVLSHR => ??? + case BVULT => ??? + case BVNAND => ??? + case BVNOR => ??? + case BVXOR => ??? + case BVXNOR => ??? + case BVCOMP => ??? + case BVSUB => rhs match + case Left(vs) => sub(lhs, vs) + case Right(bitVecLiteral) => sub(lhs, bitVecLiteral) + case BVSDIV => ??? + case BVSREM => ??? + case BVSMOD => ??? + case BVASHR => ??? + case BVULE => ??? + case BVUGT => ??? + case BVUGE => ??? + case BVSLT => ??? + case BVSLE => ??? + case BVSGT => ??? + case BVSGE => ??? + case BVEQ => ??? + case BVNEQ => ??? + case BVCONCAT => ??? + case boolOp: BoolBinOp => + boolOp match + case BoolEQ => applyOp(BVEQ, lhs, rhs) + case BoolNEQ => applyOp(BVNEQ, lhs, rhs) + case BoolAND => applyOp(BVAND, lhs, rhs) + case BoolOR => applyOp(BVOR, lhs, rhs) + case BoolIMPLIES => ??? + case BoolEQUIV => ??? + case intOp: IntBinOp => + applyOp(intOp.toBV, lhs, rhs) + case _ => ??? + } + + def applyOp(op: UnOp, rhs: ValueSet[T]): ValueSet[T] = { + op match + case bvOp: BVUnOp => + bvOp match + case BVNOT => ??? + case BVNEG => ??? + case boolOp: BoolUnOp => + boolOp match + case BoolNOT => ??? + case intOp: IntUnOp => + applyOp(intOp.toBV, rhs) + case _ => ??? 
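+    // e.g. in the BinOp overload above, applyOp(BVADD, vs, Right(BitVecLiteral(8, 64))) is routed to
+    // add(vs, BitVecLiteral(8, 64)), which shifts every strided interval in vs by 8 (illustrative only)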
+ } + + def add(x: ValueSet[T], y: ValueSet[T]): ValueSet[T] = { + (x, y) match { + case (VSBottom, t) => t + case (t, VSBottom) => t + case (VSTop, _) => VSTop + case (_, VSTop) => VSTop + case (VS(m1), VS(m2)) => + VS(m1.keys.foldLeft(m2) { + case (acc, k) => + val v1 = m1(k) + val v2 = m2(k) + acc + (k -> lattice.add(v1, v2)) + }) + } + } + + def add(x: ValueSet[T], y: BitVecLiteral): ValueSet[T] = { + x match { + case VSBottom => VSBottom + case VSTop => VSTop + case VS(m) => + VS(m.map { + case (k, s) => k -> lattice.add(s, y.value, y.size) // TODO: is the size correct here? + }) + } + } + + def sub(x: ValueSet[T], y: ValueSet[T]): ValueSet[T] = { + (x, y) match { + case (VSBottom, t) => VSBottom + case (t, VSBottom) => t + case (VS(m1), VS(m2)) => + VS(m1.keys.foldLeft(m2) { + case (acc, k) => + val v1 = m1(k) + val v2 = m2(k) + acc + (k -> lattice.sub(v1, v2)) + }) + } + } + + def sub(x: ValueSet[T], y: BitVecLiteral): ValueSet[T] = { + x match { + case VSBottom => VSBottom + case VS(m) => + VS(m.map { + case (k, s) => k -> lattice.sub(s, y.value, y.size) // TODO: is the size correct here? + }) + } + } + +// def widen(vs1: ValueSet[T], vs2: ValueSet[T]): ValueSet[T] = { +// (vs1, vs2) match { +// case (VSBottom, t) => ??? +// case (t, VSBottom) => ??? +// case (VSTop, _) => VSTop +// case (_, VSTop) => VSTop +// case (VS(m1), VS(m2)) => +// VS(m1.keys.foldLeft(m2) { +// case (acc, k) => +// val v1 = m1(k) +// val v2 = m2(k) +// acc + (k -> lattice.widen(v1, v2)) +// }) +// } +// } + + def removeLowerBounds(vs: ValueSet[T]): ValueSet[T] = { + vs match { + case VSBottom => VSBottom + case VSTop => VSTop + case VS(m) => + VS(m.map { + case (k, SI(s, l, u, w)) => k -> SI(s, lattice.lowestPossibleValue, u, w) + }) + } + } + + def removeUpperBound(vs: ValueSet[T]): ValueSet[T] = { + vs match { + case VSBottom => VSBottom + case VSTop => VSTop + case VS(m) => + VS(m.map { + case (k, SI(s, l, u, w)) => k -> SI(s, l, lattice.highestPossibleValue, w) + }) + } + } + + extension (r: DataRegion | StackRegion) + def start: BigInt = r match { + case d: DataRegion => d.start.value + case s: StackRegion => s.start.value + } + + def end(mmm: MemoryModelMap): BigInt = r match { + case d: DataRegion => mmm.getEnd(d) + case s: StackRegion => mmm.getEnd(s) + } + + /** + * ∗(vs, s): Returns a pair of sets (F, P). F represents the set of “fully accessed” a-locs: it + * consists of the a-locs that are of size s and whose starting addresses are in vs. P represents + * the set of “partially accessed” a-locs: it consists of (i) a-locs whose starting addresses are in + * vs but are not of size s, and (ii) a-locs whose addresses are in vs but whose starting addresses + * and sizes do not meet the conditions to be in F. [Reference VSA paper] + * + * @param vsR2 + * @param s size of the dereference + * @return + */ + def dereference(s: BigInt, vs: ValueSet[String], mmm: MemoryModelMap): (Set[MemoryRegion], Set[MemoryRegion]) = { + var fullyAccessedLocations = Set[MemoryRegion]() + var partiallyAccessedLocations = Set[MemoryRegion]() + vs match { + case VSBottom => VSBottom + case VSTop => ??? //TODO: should this return everything? 
+ case VS(m) => + for (elem <- m) { + if (elem._2 != lattice.bottom) { // region SI defined + elem._2 match { + case SI(stride, lower, upper, w) => + // TODO: Global memory size can be retrieved from the symbol table and are of size s + // Map addresses to exact memory locations + val regionsWithSize = mmm.getRegionsWithSize(s, elem._1) + fullyAccessedLocations = fullyAccessedLocations ++ regionsWithSize.filter(region => (region.isInstanceOf[DataRegion | StackRegion] && region.asInstanceOf[DataRegion | StackRegion].start >= lower && region.asInstanceOf[DataRegion | StackRegion].start <= upper) || region.isInstanceOf[HeapRegion]) // TODO: THIS IS NOT THE RIGHT WAY TO CHECK IF ADDRESS IS IN SI + + // Identify partially accessed locations (if any) + val notOfSize = mmm.getRegionsWithSize(s, elem._1, true) + // start or end must be in SI + partiallyAccessedLocations = partiallyAccessedLocations ++ regionsWithSize.filter(region => region.asInstanceOf[DataRegion | StackRegion].start >= lower && region.asInstanceOf[DataRegion | StackRegion].start <= upper || ((region.asInstanceOf[DataRegion | StackRegion].end(mmm) >= lower) && region.asInstanceOf[DataRegion | StackRegion].end(mmm) <= upper)) + case _ => ??? + } + } + } + } + // Return the set of fully accessed locations and the set of partially accessed locations + (fullyAccessedLocations, partiallyAccessedLocations) + } +} + +trait Bool3 + +case object BOTTOM_BOOL3 extends Bool3 { + override def toString = "BOTTOM" +} + +case object FALSE_BOOL3 extends Bool3 { + override def toString = "FALSE" +} + +case object TURE_BOOL3 extends Bool3 { + override def toString = "TRUE" +} + +case object MAYBE_BOOL3 extends Bool3 { + override def toString = "MAYBE" +} + +/** The lattice of booleans with the standard ordering. + */ +class Bool3Lattice extends Lattice[Bool3] { + + override val bottom: Bool3 = BOTTOM_BOOL3 + + override def top: Bool3 = MAYBE_BOOL3 + + override def lub(x: Bool3, y: Bool3): Bool3 = { + (x, y) match { + case (BOTTOM_BOOL3, t) => t + case (t, BOTTOM_BOOL3) => t + case (TURE_BOOL3, FALSE_BOOL3) => MAYBE_BOOL3 + case (FALSE_BOOL3, TURE_BOOL3) => MAYBE_BOOL3 + case _ => x + } + } +} + +enum Flags { + case CF // Carry Flag + case ZF // Zero Flag + case SF // Sign Flag + case PF // Parity Flag + case AF // Auxiliary Flag + case OF // Overflow Flag +} + +/** + * case CF // Carry Flag + * case ZF // Zero Flag + * case SF // Sign Flag + * case PF // Parity Flag + * case AF // Auxiliary Flag + * case OF // Overflow Flag + */ +trait Flag + +case object BOTTOM_Flag extends Flag { + override def toString = "BOTTOM_FLAG" +} + +case class FlagMap(m: Map[Flags, Bool3]) extends Flag { + override def toString: String = m.toString +} + + +/** The lattice of booleans with the standard ordering. 
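 * (Here each processor flag maps to a Bool3 value and lub is applied pointwise, so joining a state
 * with ZF -> TRUE against one with ZF -> FALSE yields ZF -> MAYBE.)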
+ */ +class FlagLattice extends Lattice[Flag] { + + override val bottom: Flag = BOTTOM_Flag + + override def top: Flag = FlagMap(Map( + Flags.CF -> MAYBE_BOOL3, + Flags.ZF -> MAYBE_BOOL3, + Flags.SF -> MAYBE_BOOL3, + Flags.PF -> MAYBE_BOOL3, + Flags.AF -> MAYBE_BOOL3, + Flags.OF -> MAYBE_BOOL3 + )) + + val lattice: Bool3Lattice = Bool3Lattice() + + override def lub(x: Flag, y: Flag): Flag = { + (x, y) match { + case (BOTTOM_Flag, t) => t + case (t, BOTTOM_Flag) => t + case (FlagMap(m1), FlagMap(m2)) => + FlagMap(m1.keys.foldLeft(m2) { + case (acc, k) => + val v1 = m1(k) + val v2 = m2(k) + acc + (k -> lattice.lub(v1, v2)) + }) + } + } + + def setFlag(flag: Flags, value: Bool3): Flag = { + FlagMap(Map(flag -> value)) + } +} + + /** The powerset lattice of a set of elements of type `A` with subset ordering. */ class PowersetLattice[A] extends Lattice[Set[A]] { diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index d078af751..d2af01dbc 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -11,14 +11,15 @@ trait MemoryRegionAnalysis(val program: Program, val globals: Map[BigInt, String], val globalOffsets: Map[BigInt, BigInt], val subroutines: Map[BigInt, String], - val constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + val constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], val ANRResult: Map[CFGPosition, Set[Variable]], val RNAResult: Map[CFGPosition, Set[Variable]], - val regionAccesses: Map[CfgNode, Map[RegisterVariableWrapper, FlatElement[Expr]]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], - val maxDepth: Int) { + val regionAccesses: Map[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]]], + val reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], + val maxDepth: Int, + val exactMatch: Boolean) { - var mallocCount: Int = 0 + private var mallocCount: Int = 0 private var stackCount: Int = 0 val stackMap: mutable.Map[Procedure, mutable.Map[Expr, StackRegion]] = mutable.Map() @@ -89,7 +90,7 @@ trait MemoryRegionAnalysis(val program: Program, private val spList = ListBuffer[Expr](stackPointer) private val ignoreRegions: Set[Expr] = Set(linkRegister, framePointer) // TODO: this could be used instead of regionAccesses in other analyses to reduce the Expr to region conversion - private val registerToRegions: mutable.Map[RegisterVariableWrapper, mutable.Set[MemoryRegion]] = mutable.Map() + private val registerToRegions: mutable.Map[RegisterWrapperPartialEquality, mutable.Set[MemoryRegion]] = mutable.Map() val procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]] = mutable.Map() var depthMap: mutable.Map[CFGPosition, Int] = mutable.Map() @@ -118,19 +119,21 @@ trait MemoryRegionAnalysis(val program: Program, println(s"Unreducible: $i") eval(i.rhs, Set.empty, i) } - evaluateExpression(binExpr.arg2, constantProp(n)) match { - case Some(b: BitVecLiteral) => - regions.foreach { - case stackRegion: StackRegion => - val nextOffset = BinaryExpr(binExpr.op, stackRegion.start, b) - evaluateExpression(nextOffset, constantProp(n)) match { - case Some(b2: BitVecLiteral) => - reducedRegions = reducedRegions + poolMaster(b2, IRWalk.procedure(n)) - case None => - } - case _ => - } - case None => + for (elem <- evaluateExpressionWithSSA(binExpr.arg2, constantProp(n), n, reachingDefs, exactMatch)) { + elem 
match { + case b: BitVecLiteral => + regions.foreach { + case stackRegion: StackRegion => + val nextOffset = BinaryExpr(binExpr.op, stackRegion.start, b) + for (elem <- evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs, exactMatch)) { + elem match { + case b2: BitVecLiteral => + reducedRegions = reducedRegions + poolMaster(b2, IRWalk.procedure(n)) + } + } + case _ => + } + } } } case _ => @@ -140,66 +143,72 @@ trait MemoryRegionAnalysis(val program: Program, def eval(exp: Expr, env: Set[MemoryRegion], n: Command): Set[MemoryRegion] = { println(s"Asked to evaluate: $exp at ${n.label}") + val regionsToReturn = mutable.Set[MemoryRegion]() exp match { case binOp: BinaryExpr => if (spList.contains(binOp.arg1)) { - evaluateExpression(binOp.arg2, constantProp(n)) match { - case Some(b: BitVecLiteral) => Set(poolMaster(b, IRWalk.procedure(n))) - case None => throw RuntimeException(s"This should be reducible: $exp") + for (elem <- evaluateExpressionWithSSA(binOp.arg2, constantProp(n), n, reachingDefs, exactMatch)) { + elem match { + case b: BitVecLiteral => regionsToReturn.addAll(Set(poolMaster(b, IRWalk.procedure(n)))) + } } } else { val reduced = reducibleToRegion(binOp, n) if (reduced.nonEmpty) { println(s"Reducible: exp $exp") - reduced + regionsToReturn.addAll(reduced) } else { - evaluateExpression(binOp, constantProp(n)) match { - case Some(b: BitVecLiteral) => eval(b, env, n) - case None => eval(binOp.arg1, env, n) ++ eval(binOp.arg2, env, n) + val elems = evaluateExpressionWithSSA(binOp, constantProp(n), n, reachingDefs, exactMatch) + for (elem <- elems) { + elem match { + case b: BitVecLiteral => regionsToReturn.addAll(eval(b, env, n)) + } + } + if (elems.isEmpty) { + regionsToReturn.addAll(eval(binOp.arg1, env, n) ++ eval(binOp.arg2, env, n)) } } } case variable: Variable => variable match { case _: LocalVar => - env case reg: Register if spList.contains(reg) => - Set(poolMaster(BitVecLiteral(0, 64), IRWalk.procedure(n))) + regionsToReturn.addAll(Set(poolMaster(BitVecLiteral(0, 64), IRWalk.procedure(n)))) case _ => - evaluateExpression(variable, constantProp(n)) match { - case Some(b: BitVecLiteral) => - eval(b, env, n) - case _ => - env // we cannot evaluate this to a concrete value, we need VSA for this + for (elem <- evaluateExpressionWithSSA(variable, constantProp(n), n, reachingDefs, exactMatch)) { + elem match { + case b: BitVecLiteral => + regionsToReturn.addAll(eval(b, env, n)) + } } } case memoryLoad: MemoryLoad => - eval(memoryLoad.index, env, n) + regionsToReturn.addAll(eval(memoryLoad.index, env, n)) // ignore case where it could be a global region (loaded later in MMM from relf) case b: BitVecLiteral => - env case literal: Literal => // ignore literals other than BitVectors - env case extract: Extract => - eval(extract.body, env, n) + regionsToReturn.addAll(eval(extract.body, env, n)) case repeat: Repeat => - eval(repeat.body, env, n) + regionsToReturn.addAll(eval(repeat.body, env, n)) case zeroExtend: ZeroExtend => - eval(zeroExtend.body, env, n) + regionsToReturn.addAll(eval(zeroExtend.body, env, n)) case signExtend: SignExtend => - eval(signExtend.body, env, n) + regionsToReturn.addAll(eval(signExtend.body, env, n)) case unaryExpr: UnaryExpr => - eval(unaryExpr.arg, env, n) + regionsToReturn.addAll(eval(unaryExpr.arg, env, n)) case memoryStore: MemoryAssign => - eval(memoryStore.index, env, n) ++ eval(memoryStore.value, env, n) + regionsToReturn.addAll(eval(memoryStore.index, env, n) ++ eval(memoryStore.value, env, n)) case memory: Memory => - env } + 
regionsToReturn.toSet } /** Transfer function for state lattice elements. */ - def localTransfer(n: CFGPosition, s: Set[MemoryRegion]): Set[MemoryRegion] = n match { + def localTransfer(n: CFGPosition, s: Set[MemoryRegion]): Set[MemoryRegion] = + var m = s + n match { case cmd: Command => println(s"N: $n") cmd match { @@ -222,44 +231,72 @@ trait MemoryRegionAnalysis(val program: Program, // } // } if (directCall.target.name == "malloc") { - evaluateExpression(mallocVariable, constantProp(n)) match { - case Some(b: BitVecLiteral) => regionLattice.lub(s, Set(HeapRegion(nextMallocCount(), b, IRWalk.procedure(n)))) - case None => s + for (elem <- evaluateExpressionWithSSA(mallocVariable, constantProp(n), n, reachingDefs, exactMatch)) { + elem match { + case b: BitVecLiteral => m = regionLattice.lub(m, Set(HeapRegion(nextMallocCount(), b, IRWalk.procedure(n)))) + } } - } else { - s } case memAssign: MemoryAssign => - val result = eval(memAssign.index, s, cmd) - regionLattice.lub(s, result) + val result = eval(memAssign.rhs.index, m, cmd) + m = regionLattice.lub(m, result) case localAssign: Assign => stackDetection(localAssign) - val result = eval(localAssign.rhs, s, cmd) - regionLattice.lub(s, result) - case _ => s + val result = eval(localAssign.rhs, m, cmd) + m = regionLattice.lub(m, result) + case _ => } - case _ => s // ignore other kinds of nodes + case _ => // ignore other kinds of nodes } + m def transferUnlifted(n: CFGPosition, s: Set[MemoryRegion]): Set[MemoryRegion] = localTransfer(n, s) } class MemoryRegionAnalysisSolver( - program: Program, - globals: Map[BigInt, String], - globalOffsets: Map[BigInt, BigInt], - subroutines: Map[BigInt, String], - constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - ANRResult: Map[CFGPosition, Set[Variable]], - RNAResult: Map[CFGPosition, Set[Variable]], - regionAccesses: Map[CfgNode, Map[RegisterVariableWrapper, FlatElement[Expr]]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], - maxDepth: Int - ) extends MemoryRegionAnalysis(program, globals, globalOffsets, subroutines, constantProp, ANRResult, RNAResult, regionAccesses, reachingDefs, maxDepth) + program: Program, + globals: Map[BigInt, String], + globalOffsets: Map[BigInt, BigInt], + subroutines: Map[BigInt, String], + constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], + ANRResult: Map[CFGPosition, Set[Variable]], + RNAResult: Map[CFGPosition, Set[Variable]], + regionAccesses: Map[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]]], + reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], + maxDepth: Int, + exactMatch: Boolean = true + ) extends MemoryRegionAnalysis(program, globals, globalOffsets, subroutines, constantProp, ANRResult, RNAResult, regionAccesses, reachingDefs, maxDepth, exactMatch) with IRIntraproceduralForwardDependencies with Analysis[Map[CFGPosition, LiftedElement[Set[MemoryRegion]]]] with WorklistFixpointSolverWithReachability[CFGPosition, Set[MemoryRegion], PowersetLattice[MemoryRegion]] { + override def funsub(n: CFGPosition, x: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]]): LiftedElement[Set[MemoryRegion]] = { + n match { + // function entry nodes are always reachable as this is intraprocedural + case _: Procedure => liftedLattice.lift(regionLattice.bottom) + // all other nodes are processed with join+transfer + case _ => super.funsub(n, x) + } + } +} + +class InterprocMemoryRegionAnalysisSolver( + program: Program, + 
globals: Map[BigInt, String], + globalOffsets: Map[BigInt, BigInt], + subroutines: Map[BigInt, String], + constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], + ANRResult: Map[CFGPosition, Set[Variable]], + RNAResult: Map[CFGPosition, Set[Variable]], + regionAccesses: Map[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]]], + reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], + maxDepth: Int, + exactMatch: Boolean = false + ) extends MemoryRegionAnalysis(program, globals, globalOffsets, subroutines, constantProp, ANRResult, RNAResult, regionAccesses, reachingDefs, maxDepth, exactMatch) + with IRInterproceduralForwardDependencies + with Analysis[Map[CFGPosition, LiftedElement[Set[MemoryRegion]]]] + with WorklistFixpointSolverWithReachability[CFGPosition, Set[MemoryRegion], PowersetLattice[MemoryRegion]] { + override def funsub(n: CFGPosition, x: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]]): LiftedElement[Set[MemoryRegion]] = { n match { // function entry nodes are always reachable as this is intraprocedural diff --git a/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala b/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala index 47592f082..15236cb16 100644 --- a/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala +++ b/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala @@ -28,7 +28,7 @@ case class ReachingDefinitionsAnalysis(program: Program) { private def generateUniqueDefinition( variable: Variable ): Assign = { - Assign(variable, BitVecLiteral(0, 0)) + Assign(variable, Register("Unique", BitVecLiteral(0, 0))) } def transfer(n: CFGPosition, s: (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]])): (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]]) = @@ -71,6 +71,15 @@ case class ReachingDefinitionsAnalysis(program: Program) { transformUses(assume.body.variables, s) case indirectCall: IndirectCall => transformUses(indirectCall.target.variables, s) + case directCall: DirectCall if directCall.target.name == "malloc" => + // assume R0 has been assigned, generate a fake definition + val mallocVar = Register("R0", BitVecType(64)) + val mallocDef = generateUniqueDefinition(mallocVar) + val mallocUseDefs: Map[Variable, Set[Definition]] = Set(mallocVar).foldLeft(Map.empty[Variable, Set[Definition]]) { + case (acc, v) => + acc + (v -> s._1(v)) + } + (s._1 + (Register("R0", BitVecType(64)) -> Set(mallocDef)), mallocUseDefs) case _ => s } } @@ -79,3 +88,8 @@ class ReachingDefinitionsAnalysisSolver(program: Program) extends ReachingDefinitionsAnalysis(program) with SimpleWorklistFixpointSolver[CFGPosition, (Map[Variable, Set[ReachingDefinitionsAnalysis#Definition]], Map[Variable, Set[ReachingDefinitionsAnalysis#Definition]]), ReachingDefinitionsAnalysis#TupleElement] with IRIntraproceduralForwardDependencies + +class InterprocReachingDefinitionsAnalysisSolver(program: Program) + extends ReachingDefinitionsAnalysis(program) + with SimpleWorklistFixpointSolver[CFGPosition, (Map[Variable, Set[ReachingDefinitionsAnalysis#Definition]], Map[Variable, Set[ReachingDefinitionsAnalysis#Definition]]), ReachingDefinitionsAnalysis#TupleElement] + with IRInterproceduralForwardDependencies \ No newline at end of file diff --git a/src/main/scala/analysis/RegToMemAnalysis.scala b/src/main/scala/analysis/RegToMemAnalysis.scala index df7217f75..1b6f9b51c 100644 --- a/src/main/scala/analysis/RegToMemAnalysis.scala +++ b/src/main/scala/analysis/RegToMemAnalysis.scala @@ 
-17,9 +17,9 @@ import scala.collection.immutable */ trait RegionAccessesAnalysis(cfg: ProgramCfg, constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]) { - val mapLattice: MapLattice[RegisterVariableWrapper, FlatElement[Expr], FlatLattice[Expr]] = MapLattice(FlatLattice[_root_.ir.Expr]()) + val mapLattice: MapLattice[RegisterWrapperPartialEquality, FlatElement[Expr], FlatLattice[Expr]] = MapLattice(FlatLattice[_root_.ir.Expr]()) - val lattice: MapLattice[CfgNode, Map[RegisterVariableWrapper, FlatElement[Expr]], MapLattice[RegisterVariableWrapper, FlatElement[Expr], FlatLattice[Expr]]] = MapLattice(mapLattice) + val lattice: MapLattice[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]], MapLattice[RegisterWrapperPartialEquality, FlatElement[Expr], FlatLattice[Expr]]] = MapLattice(mapLattice) val domain: Set[CfgNode] = cfg.nodes.toSet @@ -27,17 +27,15 @@ trait RegionAccessesAnalysis(cfg: ProgramCfg, constantProp: Map[CFGPosition, Map /** Default implementation of eval. */ - def eval(cmd: CfgCommandNode, constants: Map[Variable, FlatElement[BitVecLiteral]], s: Map[RegisterVariableWrapper, FlatElement[Expr]]): Map[RegisterVariableWrapper, FlatElement[Expr]] = { + def eval(cmd: CfgCommandNode, constants: Map[Variable, FlatElement[BitVecLiteral]], s: Map[RegisterWrapperPartialEquality, FlatElement[Expr]]): Map[RegisterWrapperPartialEquality, FlatElement[Expr]] = { cmd.data match { case assign: Assign => assign.rhs match { case memoryLoad: MemoryLoad => - s + (RegisterVariableWrapper(assign.lhs, getDefinition(assign.lhs, cmd.data, reachingDefs)) -> FlatEl(memoryLoad)) + s + (RegisterWrapperPartialEquality(assign.lhs, getDefinition(assign.lhs, cmd.data, reachingDefs)) -> FlatEl(memoryLoad)) case binaryExpr: BinaryExpr => if (evaluateExpression(binaryExpr.arg1, constants).isEmpty) { // approximates Base + Offset - Logger.debug(s"Approximating $assign in $binaryExpr") - Logger.debug(s"Reaching defs: ${reachingDefs(cmd.data)}") - s + (RegisterVariableWrapper(assign.lhs, getDefinition(assign.lhs, cmd.data, reachingDefs)) -> FlatEl(binaryExpr)) + s + (RegisterWrapperPartialEquality(assign.lhs, getDefinition(assign.lhs, cmd.data, reachingDefs)) -> FlatEl(binaryExpr)) } else { s } @@ -50,7 +48,7 @@ trait RegionAccessesAnalysis(cfg: ProgramCfg, constantProp: Map[CFGPosition, Map /** Transfer function for state lattice elements. */ - def localTransfer(n: CfgNode, s: Map[RegisterVariableWrapper, FlatElement[Expr]]): Map[RegisterVariableWrapper, FlatElement[Expr]] = n match { + def localTransfer(n: CfgNode, s: Map[RegisterWrapperPartialEquality, FlatElement[Expr]]): Map[RegisterWrapperPartialEquality, FlatElement[Expr]] = n match { case cmd: CfgCommandNode => eval(cmd, constantProp(cmd.data), s) case _ => s // ignore other kinds of nodes @@ -58,7 +56,7 @@ trait RegionAccessesAnalysis(cfg: ProgramCfg, constantProp: Map[CFGPosition, Map /** Transfer function for state lattice elements. 
*/ - def transfer(n: CfgNode, s: Map[RegisterVariableWrapper, FlatElement[Expr]]): Map[RegisterVariableWrapper, FlatElement[Expr]] = localTransfer(n, s) + def transfer(n: CfgNode, s: Map[RegisterWrapperPartialEquality, FlatElement[Expr]]): Map[RegisterWrapperPartialEquality, FlatElement[Expr]] = localTransfer(n, s) } class RegionAccessesAnalysisSolver( @@ -67,6 +65,6 @@ class RegionAccessesAnalysisSolver( reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], ) extends RegionAccessesAnalysis(cfg, constantProp, reachingDefs) with InterproceduralForwardDependencies - with Analysis[Map[CfgNode, Map[RegisterVariableWrapper, FlatElement[Expr]]]] - with SimpleWorklistFixpointSolver[CfgNode, Map[RegisterVariableWrapper, FlatElement[Expr]], MapLattice[RegisterVariableWrapper, FlatElement[Expr], FlatLattice[Expr]]] { + with Analysis[Map[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]]]] + with SimpleWorklistFixpointSolver[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]], MapLattice[RegisterWrapperPartialEquality, FlatElement[Expr], FlatLattice[Expr]]] { } \ No newline at end of file diff --git a/src/main/scala/analysis/UtilMethods.scala b/src/main/scala/analysis/UtilMethods.scala index 665acbe8e..278c8c4ca 100644 --- a/src/main/scala/analysis/UtilMethods.scala +++ b/src/main/scala/analysis/UtilMethods.scala @@ -71,8 +71,20 @@ def evaluateExpression(exp: Expr, constantPropResult: Map[Variable, FlatElement[ } } -def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapperEqualSets, Set[BitVecLiteral]], n: CFGPosition, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]): Set[BitVecLiteral] = { - def apply(op: (BitVecLiteral, BitVecLiteral) => BitVecLiteral, a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = { +/** + * Evaluate an expression in a hope of finding bitVector values for a global variable. + * If exactEquality is true, then the evaluation is done with exact equality. + * By default, exactEquality is true. + * Disabling exactEquality will allow for loose (intersection) equality and thus assist with interprocedural analysis. 
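+ * A minimal usage sketch (the position n and the constantProp and reachingDefs maps are assumed to come from the caller):
+ * {{{
+ *   val offsets: Set[BitVecLiteral] =
+ *     evaluateExpressionWithSSA(binExpr.arg2, constantProp(n), n, reachingDefs, exactEquality = false)
+ * }}}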
+ * @param exp + * @param constantPropResult + * @param n + * @param reachingDefs + * @param exactEquality + * @return + */ +def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapperEqualSets, Set[BitVecLiteral]], n: CFGPosition, reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], exactEquality: Boolean = true): Set[BitVecLiteral] = { + def apply(op: (BitVecLiteral, BitVecLiteral) => BitVecLiteral, a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = val res = for { x <- a y <- b @@ -89,8 +101,8 @@ def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapper exp match { case binOp: BinaryExpr => - val lhs = evaluateExpressionWithSSA(binOp.arg1, constantPropResult, n, reachingDefs) - val rhs = evaluateExpressionWithSSA(binOp.arg2, constantPropResult, n, reachingDefs) + val lhs = evaluateExpressionWithSSA(binOp.arg1, constantPropResult, n, reachingDefs, exactEquality) + val rhs = evaluateExpressionWithSSA(binOp.arg2, constantPropResult, n, reachingDefs, exactEquality) binOp.op match { case BVADD => apply(BitVectorEval.smt_bvadd, lhs, rhs) case BVSUB => apply(BitVectorEval.smt_bvsub, lhs, rhs) @@ -114,23 +126,26 @@ def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapper case _ => throw RuntimeException("Binary operation support not implemented: " + binOp.op) } case unaryExpr: UnaryExpr => - val result = evaluateExpressionWithSSA(unaryExpr.arg, constantPropResult, n, reachingDefs) + val result = evaluateExpressionWithSSA(unaryExpr.arg, constantPropResult, n, reachingDefs, exactEquality) unaryExpr.op match { case BVNEG => applySingle(BitVectorEval.smt_bvneg, result) case BVNOT => applySingle(BitVectorEval.smt_bvnot, result) case _ => throw RuntimeException("Unary operation support not implemented: " + unaryExpr.op) } case extend: ZeroExtend => - val result = evaluateExpressionWithSSA(extend.body, constantPropResult, n, reachingDefs) + val result = evaluateExpressionWithSSA(extend.body, constantPropResult, n, reachingDefs, exactEquality) applySingle(BitVectorEval.smt_zero_extend(extend.extension, _: BitVecLiteral), result) - case extend: SignExtend => - val result = evaluateExpressionWithSSA(extend.body, constantPropResult, n, reachingDefs) - applySingle(BitVectorEval.smt_sign_extend(extend.extension, _: BitVecLiteral), result) + case se: SignExtend => + val result = evaluateExpressionWithSSA(se.body, constantPropResult, n, reachingDefs, exactEquality) + applySingle(BitVectorEval.smt_sign_extend(se.extension, _: BitVecLiteral), result) case e: Extract => - val result = evaluateExpressionWithSSA(e.body, constantPropResult, n, reachingDefs) + val result = evaluateExpressionWithSSA(e.body, constantPropResult, n, reachingDefs, exactEquality) applySingle(BitVectorEval.boogie_extract(e.end, e.start, _: BitVecLiteral), result) case variable: Variable => - constantPropResult(RegisterWrapperEqualSets(variable, getUse(variable, n, reachingDefs))) + if exactEquality then + constantPropResult(RegisterWrapperEqualSets(variable, getUse(variable, n, reachingDefs))) + else + constantPropResult.asInstanceOf[Map[RegisterWrapperPartialEquality, Set[BitVecLiteral]]](RegisterWrapperPartialEquality(variable, getUse(variable, n, reachingDefs))) case b: BitVecLiteral => Set(b) case _ => throw RuntimeException("ERROR: CASE NOT HANDLED: " + exp + "\n") } diff --git a/src/main/scala/analysis/solvers/AbstractSPAnalysis.scala b/src/main/scala/analysis/solvers/AbstractSPAnalysis.scala new file 
mode 100644 index 000000000..36efb4a9d --- /dev/null +++ b/src/main/scala/analysis/solvers/AbstractSPAnalysis.scala @@ -0,0 +1,95 @@ +//package analysis.solvers +// +//import ir.* +//import analysis.solvers._ +//import analysis.* +// +//import scala.collection.immutable +//import scala.collection.mutable +// +// +//class AbstractSP(val locations: Set[BitVecLiteral], val definitions: Set[LocalAssign]) { +// override def toString: String = "AbstractSP(" + location + ")" +// +// def add(that: BitVecLiteral, definer: Set[LocalAssign]): AbstractSP = { +// val newLocations = locations.map(l => BitVectorEval.smt_bvadd(l, that)) +// AbstractSP(newLocations, definer) +// } +// +// def sub(that: BitVecLiteral, definer: Set[LocalAssign]): AbstractSP = { +// val newLocations = locations.map(l => BitVectorEval.smt_bvsub(l, that)) +// AbstractSP(newLocations, definer) +// } +// +// def union(that: AbstractSP): AbstractSP = { +// AbstractSP(locations ++ that.locations, definitions ++ that.definitions) +// } +//} +// +//class TopAbstractSP extends AbstractSP(Set.empty, Set.empty) { +// override def toString: String = "TopAbstractSP" +//} +// +// +///** +// * Tracks the stack pointer abstractly and offers calculations for the stack pointer. +// * Uses +// */ +//trait AbstractSPAnalysis(program: Program, constantProp: Map[CFGPosition, Map[RegisterWrapperPartialEquality, Set[BitVecLiteral]]]) { +// +// val mapLattice: MapLattice[RegisterWrapperPartialEquality, FlatElement[AbstractSP], FlatLattice[AbstractSP]] = MapLattice(AbstractSPLattice()) +// +// val lattice: MapLattice[CFGPosition, Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]], MapLattice[RegisterWrapperPartialEquality, FlatElement[AbstractSP], FlatLattice[AbstractSP]]] = MapLattice(mapLattice) +// +// val domain: Set[CFGPosition] = Set.empty ++ program +// +// private val stackPointer = Register("R31", BitVecType(64)) +// +// /** Default implementation of eval. +// */ +// def eval(cmd: Command, s: Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]]): Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]] = { +// +// } +// +// /** Transfer function for state lattice elements. +// */ +// def localTransfer(n: CFGPosition, s: Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]]): Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]] = n match { +// case r: Command => +// r match { +// // assignments +// case la: LocalAssign => +// if (la.lhs == stackPointer) { +// val reachingDefs = getDefinition(la.lhs, n, reachingDefs) +// val rhs = eval(la.rhs, s, n, reachingDefs) +// val rhsLocations = rhs.locations +// val rhsDefinitions = rhs.definitions +// val lhs = AbstractSP(rhsLocations, rhsDefinitions) +// s + (la.lhs -> FlatEl(lhs)) +// } else { +// s + (la.lhs -> eval(la.rhs, s)) +// } +// +// val lhsWrappers = s.collect { +// case (k, v) if RegisterWrapperPartialEquality(k.variable, k.assigns) == RegisterWrapperPartialEquality(la.lhs, getDefinition(la.lhs, r, reachingDefs)) => (k, v) +// } +// if (lhsWrappers.nonEmpty) { +// s ++ lhsWrappers.map((k, v) => (RegisterWrapperEqualSets(k.variable, k.assigns ++ getDefinition(la.lhs, r, reachingDefs)), v.union(eval(la.rhs, s, r)))) +// } else { +// s + (RegisterWrapperEqualSets(la.lhs, getDefinition(la.lhs, r, reachingDefs)) -> eval(la.rhs, s, n)) +// } +// // all others: like no-ops +// case _ => s +// } +// case _ => s +// } +// +// /** Transfer function for state lattice elements. 
+// */ +// def transfer(n: CFGPosition, s: Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]]): Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]] = localTransfer(n, s) +//} +// +//class AbstractSPAnalysisSolver(program: Program, constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]]) extends AbstractSPAnalysis(program, constantProp) +// with IRIntraproceduralForwardDependencies +// with Analysis[Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]]] +// with SimpleWorklistFixpointSolver[CFGPosition, Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]], MapLattice[RegisterWrapperPartialEquality, FlatElement[AbstractSP], FlatLattice[AbstractSP]]] { +//} \ No newline at end of file diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 1c3db4599..e68eaf58b 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -50,18 +50,18 @@ case class IRContext( /** Stores the results of the static analyses. */ case class StaticAnalysisContext( - cfg: ProgramCfg, - constPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - IRconstPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - memoryRegionResult: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]], - vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]], - interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]], - paramResults: Map[Procedure, Set[Variable]], - steensgaardResults: Map[RegisterVariableWrapper, Set[RegisterVariableWrapper | MemoryRegion]], - mmmResults: MemoryModelMap, - memoryRegionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], - varDepsSummaries: Map[Procedure, Map[Taintable, Set[Taintable]]], + cfg: ProgramCfg, + constPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + IRconstPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + memoryRegionResult: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]], + vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]], + interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]], + paramResults: Map[Procedure, Set[Variable]], + steensgaardResults: Map[RegisterWrapperPartialEquality, Set[RegisterWrapperPartialEquality | MemoryRegion]], + mmmResults: MemoryModelMap, + memoryRegionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], + reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], + varDepsSummaries: Map[Procedure, Map[Taintable, Set[Taintable]]], ) /** Results of the main program execution. 
@@ -338,7 +338,7 @@ object IRTransform { def resolveIndirectCallsUsingPointsTo( cfg: ProgramCfg, - pointsTos: Map[RegisterVariableWrapper, Set[RegisterVariableWrapper | MemoryRegion]], + pointsTos: Map[RegisterWrapperPartialEquality, Set[RegisterWrapperPartialEquality | MemoryRegion]], regionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], IRProgram: Program @@ -396,11 +396,11 @@ object IRTransform { def resolveAddresses(variable: Variable, i: IndirectCall): mutable.Set[String] = { val names = mutable.Set[String]() - val variableWrapper = RegisterVariableWrapper(variable, getUse(variable, i, reachingDefs)) + val variableWrapper = RegisterWrapperPartialEquality(variable, getUse(variable, i, reachingDefs)) pointsTos.get(variableWrapper) match { case Some(value) => value.map { - case v: RegisterVariableWrapper => names.addAll(resolveAddresses(v.variable, i)) + case v: RegisterWrapperPartialEquality => names.addAll(resolveAddresses(v.variable, i)) case m: MemoryRegion => names.addAll(searchRegion(m)) } names @@ -691,7 +691,7 @@ object StaticAnalysis { }) Logger.info("[!] Running Reaching Definitions Analysis") - val reachingDefinitionsAnalysisSolver = ReachingDefinitionsAnalysisSolver(IRProgram) + val reachingDefinitionsAnalysisSolver = InterprocReachingDefinitionsAnalysisSolver(IRProgram) val reachingDefinitionsAnalysisResults = reachingDefinitionsAnalysisSolver.analyze() println(s"Finished reaching definitions at ${(System.nanoTime() - before) / 1000000} ms") @@ -720,7 +720,7 @@ object StaticAnalysis { println(s"Finished ConstProp with SSA at ${(System.nanoTime() - before) / 1000000} ms") Logger.info("[!] Running MRA") - val mraSolver = MemoryRegionAnalysisSolver(IRProgram, globalAddresses, globalOffsets, mergedSubroutines, constPropResult, ANRResult, RNAResult, regionAccessesAnalysisResults, reachingDefinitionsAnalysisResults, maxDepth = 3) + val mraSolver = InterprocMemoryRegionAnalysisSolver(IRProgram, globalAddresses, globalOffsets, mergedSubroutines, constPropResultWithSSA, ANRResult, RNAResult, regionAccessesAnalysisResults, reachingDefinitionsAnalysisResults, maxDepth = 3) val mraResult = mraSolver.analyze() Logger.info("[!] Running MMM") @@ -729,7 +729,7 @@ object StaticAnalysis { mmm.logRegions() Logger.info("[!] Injecting regions") - val regionInjector = RegionInjector(domain, constPropResultWithSSA, mmm, reachingDefinitionsAnalysisResults, globalOffsets) + val regionInjector = RegionInjector(domain, IRProgram, constPropResultWithSSA, mmm, reachingDefinitionsAnalysisResults, globalOffsets) regionInjector.nodeVisitor() Logger.info("[!] 
Running Steensgaard") @@ -751,6 +751,11 @@ object StaticAnalysis { s"${s}_new_ir_constprop$iteration.dot" ) + writeToFile( + toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> constPropResultWithSSA(b).toString).toMap), + s"${s}_new_ir_constpropWithSSA$iteration.dot" + ) + writeToFile( toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> mraResult(b).toString).toMap), s"${s}_MRA$iteration.dot" @@ -762,17 +767,19 @@ object StaticAnalysis { // ValueSetAnalysisSolver(IRProgram, globalAddresses, externalAddresses, globalOffsets, subroutines, mmm, constPropResult) // val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = vsaSolver.analyze() - val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = Map() - val actualVSA = ActualVSA(IRProgram, constPropResult, reachingDefinitionsAnalysisResults, mmm) - val actualVSAResults: mutable.Map[CFGPosition, actualVSA.AbsEnv] = actualVSA.IntraProceduralVSA() - config.analysisDotPath.foreach(s => { - writeToFile( - toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> actualVSAResults.withDefaultValue(actualVSA.AbsEnv(mutable.Map(), mutable.Map(), mutable.Map())).get(b).toString).toMap), - s"${s}_ActualVSA$iteration.dot" - ) - }) + val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = Map() +// +// val actualVSA = ActualVSA(IRProgram, constPropResult, reachingDefinitionsAnalysisResults, mmm) +// val actualVSAResults: mutable.Map[CFGPosition, actualVSA.AbsEnv] = actualVSA.IntraProceduralVSA() +// +// config.analysisDotPath.foreach(s => { +// writeToFile( +// toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> actualVSAResults.withDefaultValue(actualVSA.AbsEnv()).get(b).toString).toMap), +// s"${s}_ActualVSA$iteration.dot" +// ) +// }) Logger.info("[!] 
Running Interprocedural Live Variables Analysis") //val interLiveVarsResults = InterLiveVarsAnalysis(IRProgram).analyze() From 6ac45fc0f6db0d49fed2621a4d063cfe56681e37 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Fri, 23 Aug 2024 11:29:06 +1000 Subject: [PATCH 037/104] Merge fixes --- .../InterprocSteensgaardAnalysis.scala | 2 +- .../scala/analysis/MemoryRegionAnalysis.scala | 4 +- .../ReachingDefinitionsAnalysis.scala | 6 +- src/main/scala/analysis/RegionInjector.scala | 557 +++++++++--------- src/main/scala/analysis/UtilMethods.scala | 39 +- src/main/scala/util/RunUtils.scala | 8 +- 6 files changed, 308 insertions(+), 308 deletions(-) diff --git a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala index 493f36e9f..0cfb7ca56 100644 --- a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala +++ b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala @@ -287,7 +287,7 @@ class InterprocSteensgaardAnalysis( case binOp: BinaryExpr => // X1 = &X2: [[X1]] = ↑[[X2]] exprToRegion(binOp, cmd).foreach( - x => unify(IdentifierVariable(RegisterWrapperPartialEquality(localAssign.lhs, getDefinition(assign.lhs, cmd, reachingDefs))), PointerRef(AllocVariable(x))) + x => unify(IdentifierVariable(RegisterWrapperPartialEquality(assign.lhs, getDefinition(assign.lhs, cmd, reachingDefs))), PointerRef(AllocVariable(x))) ) // TODO: should lookout for global base + offset case as well case _ => diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index d2af01dbc..b75ec0f24 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -238,7 +238,7 @@ trait MemoryRegionAnalysis(val program: Program, } } case memAssign: MemoryAssign => - val result = eval(memAssign.rhs.index, m, cmd) + val result = eval(memAssign.index, m, cmd) m = regionLattice.lub(m, result) case localAssign: Assign => stackDetection(localAssign) @@ -289,7 +289,7 @@ class InterprocMemoryRegionAnalysisSolver( ANRResult: Map[CFGPosition, Set[Variable]], RNAResult: Map[CFGPosition, Set[Variable]], regionAccesses: Map[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], + reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], maxDepth: Int, exactMatch: Boolean = false ) extends MemoryRegionAnalysis(program, globals, globalOffsets, subroutines, constantProp, ANRResult, RNAResult, regionAccesses, reachingDefs, maxDepth, exactMatch) diff --git a/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala b/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala index 15236cb16..c384bb12d 100644 --- a/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala +++ b/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala @@ -28,7 +28,7 @@ case class ReachingDefinitionsAnalysis(program: Program) { private def generateUniqueDefinition( variable: Variable ): Assign = { - Assign(variable, Register("Unique", BitVecLiteral(0, 0))) + Assign(variable, Register("Unique", 0)) } def transfer(n: CFGPosition, s: (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]])): (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]]) = @@ -73,13 +73,13 @@ case class ReachingDefinitionsAnalysis(program: Program) { transformUses(indirectCall.target.variables, s) case directCall: DirectCall if 
directCall.target.name == "malloc" => // assume R0 has been assigned, generate a fake definition - val mallocVar = Register("R0", BitVecType(64)) + val mallocVar = Register("R0", 64) val mallocDef = generateUniqueDefinition(mallocVar) val mallocUseDefs: Map[Variable, Set[Definition]] = Set(mallocVar).foldLeft(Map.empty[Variable, Set[Definition]]) { case (acc, v) => acc + (v -> s._1(v)) } - (s._1 + (Register("R0", BitVecType(64)) -> Set(mallocDef)), mallocUseDefs) + (s._1 + (Register("R0", 64) -> Set(mallocDef)), mallocUseDefs) case _ => s } } diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala index fc8633f9b..70fe29d9a 100644 --- a/src/main/scala/analysis/RegionInjector.scala +++ b/src/main/scala/analysis/RegionInjector.scala @@ -1,280 +1,281 @@ -package analysis - -import ir.* -import util.Logger -import scala.collection.immutable -import scala.collection.mutable -import scala.collection.mutable.ArrayBuffer - -/** - * Replaces the region access with the calculated memory region. - */ -class RegionInjector(domain: mutable.Set[CFGPosition], - program: Program, - constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], - mmm: MemoryModelMap, - reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], - globalOffsets: Map[BigInt, BigInt]) { - private val stackPointer = Register("R31", BitVecType(64)) - - def nodeVisitor(): Unit = { - for (elem <- domain) {localTransfer(elem)} - program.initialMemory = transformMemorySections(program.initialMemory) - program.readOnlyMemory = transformMemorySections(program.readOnlyMemory) - } - - /** - * Used to reduce an expression that may be a sub-region of a memory region. - * Pointer reduction example: - * R2 = R31 + 20 - * Mem[R2 + 8] <- R1 - * - * Steps: - * 1) R2 = R31 + 20 <- ie. stack access (assume R31 = stackPointer) - * ↓ - * R2 = StackRegion("stack_1", 20) - * - * 2) Mem[R2 + 8] <- R1 <- ie. memStore - * ↓ - * (StackRegion("stack_1", 20) + 8) <- R1 - * ↓ - * MMM.get(20 + 8) <- R1 - * - * @param binExpr - * @param n - * @return Set[MemoryRegion]: a set of regions that the expression may be pointing to - */ - def reducibleToRegion(binExpr: BinaryExpr, n: Command): Set[MemoryRegion] = { - var reducedRegions = Set.empty[MemoryRegion] - binExpr.arg1 match { - case variable: Variable => - evaluateExpressionWithSSA(binExpr, constantProp(n), n, reachingDefs).foreach { b => - val region = mmm.findDataObject(b.value) - reducedRegions = reducedRegions ++ region - } - if (reducedRegions.nonEmpty) { - return reducedRegions - } - val ctx = getUse(variable, n, reachingDefs) - for (i <- ctx) { - if (i != n) { // handles loops (ie. 
R19 = R19 + 1) %00000662 in jumptable2 - val regions = i.rhs match { - case loadL: MemoryLoad => - val foundRegions = exprToRegion(loadL.index, i) - val toReturn = mutable.Set[MemoryRegion]().addAll(foundRegions) - for { - f <- foundRegions - } { - // TODO: Must enable this (probably need to calculate those contents beforehand) -// if (memoryRegionContents.contains(f)) { -// memoryRegionContents(f).foreach { -// case b: BitVecLiteral => -// // val region = mmm.findDataObject(b.value) -// // if (region.isDefined) { -// // toReturn.addOne(region.get) -// // } -// case r: MemoryRegion => -// toReturn.addOne(r) -// toReturn.remove(f) +//package analysis +// +//import ir.* +//import util.Logger +//import scala.collection.immutable +//import scala.collection.mutable +//import scala.collection.mutable.ArrayBuffer +// +///** +// * Replaces the region access with the calculated memory region. +// */ +//class RegionInjector(domain: mutable.Set[CFGPosition], +// program: Program, +// constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], +// mmm: MemoryModelMap, +// reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], +// globalOffsets: Map[BigInt, BigInt]) { +// private val stackPointer = Register("R31", 64) +// +// def nodeVisitor(): Unit = { +// for (elem <- domain) {localTransfer(elem)} +// program.initialMemory = transformMemorySections(program.initialMemory) +// program.readOnlyMemory = transformMemorySections(program.readOnlyMemory) +// } +// +// /** +// * Used to reduce an expression that may be a sub-region of a memory region. +// * Pointer reduction example: +// * R2 = R31 + 20 +// * Mem[R2 + 8] <- R1 +// * +// * Steps: +// * 1) R2 = R31 + 20 <- ie. stack access (assume R31 = stackPointer) +// * ↓ +// * R2 = StackRegion("stack_1", 20) +// * +// * 2) Mem[R2 + 8] <- R1 <- ie. memStore +// * ↓ +// * (StackRegion("stack_1", 20) + 8) <- R1 +// * ↓ +// * MMM.get(20 + 8) <- R1 +// * +// * @param binExpr +// * @param n +// * @return Set[MemoryRegion]: a set of regions that the expression may be pointing to +// */ +// def reducibleToRegion(binExpr: BinaryExpr, n: Command): Set[MemoryRegion] = { +// var reducedRegions = Set.empty[MemoryRegion] +// binExpr.arg1 match { +// case variable: Variable => +// evaluateExpressionWithSSA(binExpr, constantProp(n), n, reachingDefs).foreach { b => +// val region = mmm.findDataObject(b.value) +// reducedRegions = reducedRegions ++ region +// } +// if (reducedRegions.nonEmpty) { +// return reducedRegions +// } +// val ctx = getUse(variable, n, reachingDefs) +// for (i <- ctx) { +// if (i != n) { // handles loops (ie. 
R19 = R19 + 1) %00000662 in jumptable2 +// val regions = i.rhs match { +// case loadL: MemoryLoad => +// val foundRegions = exprToRegion(loadL.index, i) +// val toReturn = mutable.Set[MemoryRegion]().addAll(foundRegions) +// for { +// f <- foundRegions +// } { +// // TODO: Must enable this (probably need to calculate those contents beforehand) +//// if (memoryRegionContents.contains(f)) { +//// memoryRegionContents(f).foreach { +//// case b: BitVecLiteral => +//// // val region = mmm.findDataObject(b.value) +//// // if (region.isDefined) { +//// // toReturn.addOne(region.get) +//// // } +//// case r: MemoryRegion => +//// toReturn.addOne(r) +//// toReturn.remove(f) +//// } +//// } +// } +// toReturn.toSet +// case _: BitVecLiteral => +// Set.empty[MemoryRegion] +// case _ => +// println(s"Unknown expression: ${i}") +// println(ctx) +// exprToRegion(i.rhs, i) +// } +// val results = evaluateExpressionWithSSA(binExpr.arg2, constantProp(n), n, reachingDefs) +// for { +// b <- results +// r <- regions +// } { +// r match { +// case stackRegion: StackRegion => +// println(s"StackRegion: ${stackRegion.start}") +// println(s"BitVecLiteral: ${b}") +// if (b.size == stackRegion.start.size) { +// val nextOffset = BinaryExpr(binExpr.op, stackRegion.start, b) +// evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => +// reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, b2), n) // } // } - } - toReturn.toSet - case _: BitVecLiteral => - Set.empty[MemoryRegion] - case _ => - println(s"Unknown expression: ${i}") - println(ctx) - exprToRegion(i.rhs, i) - } - val results = evaluateExpressionWithSSA(binExpr.arg2, constantProp(n), n, reachingDefs) - for { - b <- results - r <- regions - } { - r match { - case stackRegion: StackRegion => - println(s"StackRegion: ${stackRegion.start}") - println(s"BitVecLiteral: ${b}") - if (b.size == stackRegion.start.size) { - val nextOffset = BinaryExpr(binExpr.op, stackRegion.start, b) - evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => - reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, b2), n) - } - } - case dataRegion: DataRegion => - val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start, globalOffsets), b) - evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => - reducedRegions ++= exprToRegion(b2, n) - } - case _ => - } - } - } - } - case _ => - } - reducedRegions - } - - /** - * Finds a region for a given expression using MMM results - * - * @param expr - * @param n - * @return Set[MemoryRegion]: a set of regions that the expression may be pointing to - */ - def exprToRegion(expr: Expr, n: Command): Set[MemoryRegion] = { - var res = Set[MemoryRegion]() - mmm.popContext() - mmm.pushContext(IRWalk.procedure(n).name) - expr match { // TODO: Stack detection here should be done in a better way or just merged with data - case binOp: BinaryExpr if binOp.arg1 == stackPointer => - evaluateExpressionWithSSA(binOp.arg2, constantProp(n), n, reachingDefs).foreach { b => - if binOp.arg2.variables.exists { v => v.sharedVariable } then { - Logger.debug("Shared stack object: " + b) - Logger.debug("Shared in: " + expr) - val regions = mmm.findSharedStackObject(b.value) - Logger.debug("found: " + regions) - res ++= regions - } else { - val region = mmm.findStackObject(b.value) - if (region.isDefined) { - res = res + region.get - } - } - } - res - case binaryExpr: BinaryExpr => - res ++= reducibleToRegion(binaryExpr, n) - res - 
case v: Variable if v == stackPointer => - res ++= mmm.findStackObject(0) - res - case v: Variable => - evaluateExpressionWithSSA(expr, constantProp(n), n, reachingDefs).foreach { b => - Logger.debug("BitVecLiteral: " + b) - val region = mmm.findDataObject(b.value) - if (region.isDefined) { - res += region.get - } - } - if (res.isEmpty) { // may be passed as param - val ctx = getUse(v, n, reachingDefs) - for (i <- ctx) { - i.rhs match { - case load: MemoryLoad => // treat as a region - res ++= exprToRegion(load.index, i) - case binaryExpr: BinaryExpr => - res ++= reducibleToRegion(binaryExpr, i) - case _ => // also treat as a region (for now) even if just Base + Offset without memLoad - res ++= exprToRegion(i.rhs, i) - } - } - } - res - case _ => - evaluateExpressionWithSSA(expr, constantProp(n), n, reachingDefs).foreach { b => - Logger.debug("BitVecLiteral: " + b) - val region = mmm.findDataObject(b.value) - if (region.isDefined) { - res += region.get - } - } - res - } - } - - /** Default implementation of eval. - */ - def eval(expr: Expr, cmd: Command): Expr = { - expr match - case literal: Literal => literal // ignore literals - case Extract(end, start, body) => - Extract(end, start, eval(body, cmd)) - case Repeat(repeats, body) => - Repeat(repeats, eval(body, cmd)) - case ZeroExtend(extension, body) => - ZeroExtend(extension, eval(body, cmd)) - case SignExtend(extension, body) => - SignExtend(extension, eval(body, cmd)) - case UnaryExpr(op, arg) => - UnaryExpr(op, eval(arg, cmd)) - case BinaryExpr(op, arg1, arg2) => - BinaryExpr(op, eval(arg1, cmd), eval(arg2, cmd)) - case MemoryStore(mem, index, value, endian, size) => - // TODO: index should be replaced region - val regions = exprToRegion(eval(index, cmd), cmd) - if (regions.size == 1) { - MemoryStore(Memory(regions.head.regionIdentifier, mem.addressSize, mem.valueSize), eval(index, cmd), eval(value, cmd), endian, size) - } else if (regions.size > 1) { - Logger.warn(s"MemStore is: ${cmd}") - Logger.warn(s"Multiple regions found for memory store: ${regions}") - MemoryStore(Memory(mmm.mergeRegions(regions).regionIdentifier, mem.addressSize, mem.valueSize), eval(index, cmd), eval(value, cmd), endian, size) - } else { - Logger.warn(s"MemStore is: ${cmd}") - Logger.warn(s"No region found for memory store") - expr - } - case MemoryLoad(mem, index, endian, size) => - // TODO: index should be replaced region - val regions = exprToRegion(eval(index, cmd), cmd) - if (regions.size == 1) { - MemoryLoad(Memory(regions.head.regionIdentifier, mem.addressSize, mem.valueSize), eval(index, cmd), endian, size) - } else if (regions.size > 1) { - Logger.warn(s"MemLoad is: ${cmd}") - Logger.warn(s"Multiple regions found for memory load: ${regions}") - MemoryLoad(Memory(mmm.mergeRegions(regions).regionIdentifier, mem.addressSize, mem.valueSize), eval(index, cmd), endian, size) - } else { - Logger.warn(s"MemLoad is: ${cmd}") - Logger.warn(s"No region found for memory load") - expr - } - case Memory(name, addressSize, valueSize) => - expr // ignore memory - case variable: Variable => variable // ignore variables - } - - /** Transfer function for state lattice elements. 
- */ - def localTransfer(n: CFGPosition): Unit = n match { - case cmd: Command => - cmd match - case statement: Statement => statement match - case assign: LocalAssign => - assign.rhs = eval(assign.rhs, cmd) - case mAssign: MemoryAssign => - mAssign.lhs = eval(mAssign.lhs, cmd).asInstanceOf[Memory] - mAssign.rhs = eval(mAssign.rhs, cmd).asInstanceOf[MemoryStore] - case nop: NOP => // ignore NOP - case assert: Assert => - assert.body = eval(assert.body, cmd) - case assume: Assume => - assume.body = eval(assume.body, cmd) - case jump: Jump => jump match - case to: GoTo => // ignore GoTo - case call: Call => call match - case call: DirectCall => // ignore DirectCall - case call: IndirectCall => // ignore IndirectCall - case _ => // ignore other kinds of nodes - } - - def transformMemorySections(memorySegment: ArrayBuffer[MemorySection]): ArrayBuffer[MemorySection] = { - val newArrayBuffer = ArrayBuffer.empty[MemorySection] - for (elem <- memorySegment) { - elem match { - case mem: MemorySection => - val regions = mmm.findDataObject(mem.address) - if (regions.size == 1) { - newArrayBuffer += MemorySection(regions.head.regionIdentifier, mem.address, mem.size, mem.bytes) - Logger.warn(s"Region ${regions.get.regionIdentifier} found for memory section ${mem.address}") - } else { - newArrayBuffer += mem - Logger.warn(s"No region found for memory section ${mem.address}") - } - case _ => - } - } - newArrayBuffer - } -} \ No newline at end of file +// case dataRegion: DataRegion => +// val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start, globalOffsets), b) +// evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => +// reducedRegions ++= exprToRegion(b2, n) +// } +// case _ => +// } +// } +// } +// } +// case _ => +// } +// reducedRegions +// } +// +// /** +// * Finds a region for a given expression using MMM results +// * +// * @param expr +// * @param n +// * @return Set[MemoryRegion]: a set of regions that the expression may be pointing to +// */ +// def exprToRegion(expr: Expr, n: Command): Set[MemoryRegion] = { +// var res = Set[MemoryRegion]() +// mmm.popContext() +// mmm.pushContext(IRWalk.procedure(n).name) +// expr match { // TODO: Stack detection here should be done in a better way or just merged with data +// case binOp: BinaryExpr if binOp.arg1 == stackPointer => +// evaluateExpressionWithSSA(binOp.arg2, constantProp(n), n, reachingDefs).foreach { b => +// if binOp.arg2.variables.exists { v => v.sharedVariable } then { +// Logger.debug("Shared stack object: " + b) +// Logger.debug("Shared in: " + expr) +// val regions = mmm.findSharedStackObject(b.value) +// Logger.debug("found: " + regions) +// res ++= regions +// } else { +// val region = mmm.findStackObject(b.value) +// if (region.isDefined) { +// res = res + region.get +// } +// } +// } +// res +// case binaryExpr: BinaryExpr => +// res ++= reducibleToRegion(binaryExpr, n) +// res +// case v: Variable if v == stackPointer => +// res ++= mmm.findStackObject(0) +// res +// case v: Variable => +// evaluateExpressionWithSSA(expr, constantProp(n), n, reachingDefs).foreach { b => +// Logger.debug("BitVecLiteral: " + b) +// val region = mmm.findDataObject(b.value) +// if (region.isDefined) { +// res += region.get +// } +// } +// if (res.isEmpty) { // may be passed as param +// val ctx = getUse(v, n, reachingDefs) +// for (i <- ctx) { +// i.rhs match { +// case load: MemoryLoad => // treat as a region +// res ++= exprToRegion(load.index, i) +// case binaryExpr: BinaryExpr => +// res ++= 
reducibleToRegion(binaryExpr, i) +// case _ => // also treat as a region (for now) even if just Base + Offset without memLoad +// res ++= exprToRegion(i.rhs, i) +// } +// } +// } +// res +// case _ => +// evaluateExpressionWithSSA(expr, constantProp(n), n, reachingDefs).foreach { b => +// Logger.debug("BitVecLiteral: " + b) +// val region = mmm.findDataObject(b.value) +// if (region.isDefined) { +// res += region.get +// } +// } +// res +// } +// } +// +// /** Default implementation of eval. +// */ +// def eval(expr: Expr, cmd: Command): Expr = { +// expr match +// case literal: Literal => literal // ignore literals +// case Extract(end, start, body) => +// Extract(end, start, eval(body, cmd)) +// case Repeat(repeats, body) => +// Repeat(repeats, eval(body, cmd)) +// case ZeroExtend(extension, body) => +// ZeroExtend(extension, eval(body, cmd)) +// case SignExtend(extension, body) => +// SignExtend(extension, eval(body, cmd)) +// case UnaryExpr(op, arg) => +// UnaryExpr(op, eval(arg, cmd)) +// case BinaryExpr(op, arg1, arg2) => +// BinaryExpr(op, eval(arg1, cmd), eval(arg2, cmd)) +// case MemoryAssign(mem, index, value, endian, size) => +// // TODO: index should be replaced region +// val regions = exprToRegion(eval(index, cmd), cmd) +// if (regions.size == 1) { +// MemoryAssign(Memory(regions.head.regionIdentifier, mem.addressSize, mem.valueSize), eval(index, cmd), eval(value, cmd), endian, size) +// } else if (regions.size > 1) { +// Logger.warn(s"MemStore is: ${cmd}") +// Logger.warn(s"Multiple regions found for memory store: ${regions}") +// MemoryAssign(Memory(mmm.mergeRegions(regions).regionIdentifier, mem.addressSize, mem.valueSize), eval(index, cmd), eval(value, cmd), endian, size) +// } else { +// Logger.warn(s"MemStore is: ${cmd}") +// Logger.warn(s"No region found for memory store") +// expr +// } +// case MemoryLoad(mem, index, endian, size) => +// // TODO: index should be replaced region +// val regions = exprToRegion(eval(index, cmd), cmd) +// if (regions.size == 1) { +// MemoryLoad(Memory(regions.head.regionIdentifier, mem.addressSize, mem.valueSize), eval(index, cmd), endian, size) +// } else if (regions.size > 1) { +// Logger.warn(s"MemLoad is: ${cmd}") +// Logger.warn(s"Multiple regions found for memory load: ${regions}") +// MemoryLoad(Memory(mmm.mergeRegions(regions).regionIdentifier, mem.addressSize, mem.valueSize), eval(index, cmd), endian, size) +// } else { +// Logger.warn(s"MemLoad is: ${cmd}") +// Logger.warn(s"No region found for memory load") +// expr +// } +// case Memory(name, addressSize, valueSize) => +// expr // ignore memory +// case variable: Variable => variable // ignore variables +// } +// +// /** Transfer function for state lattice elements. 
+// */ +// def localTransfer(n: CFGPosition): Unit = n match { +// case cmd: Command => +// cmd match +// case statement: Statement => statement match +// case assign: Assign => +// assign.rhs = eval(assign.rhs, cmd) +// case mAssign: MemoryAssign => +// mAssign.mem = eval(mAssign.mem, cmd).asInstanceOf[Memory] +// mAssign.index = eval(mAssign.index, cmd) +// mAssign.value = eval(mAssign.value, cmd) +// case nop: NOP => // ignore NOP +// case assert: Assert => +// assert.body = eval(assert.body, cmd) +// case assume: Assume => +// assume.body = eval(assume.body, cmd) +// case jump: Jump => jump match +// case to: GoTo => // ignore GoTo +// case call: Call => call match +// case call: DirectCall => // ignore DirectCall +// case call: IndirectCall => // ignore IndirectCall +// case _ => // ignore other kinds of nodes +// } +// +// def transformMemorySections(memorySegment: ArrayBuffer[MemorySection]): ArrayBuffer[MemorySection] = { +// val newArrayBuffer = ArrayBuffer.empty[MemorySection] +// for (elem <- memorySegment) { +// elem match { +// case mem: MemorySection => +// val regions = mmm.findDataObject(mem.address) +// if (regions.size == 1) { +// newArrayBuffer += MemorySection(regions.head.regionIdentifier, mem.address, mem.size, mem.bytes) +// Logger.warn(s"Region ${regions.get.regionIdentifier} found for memory section ${mem.address}") +// } else { +// newArrayBuffer += mem +// Logger.warn(s"No region found for memory section ${mem.address}") +// } +// case _ => +// } +// } +// newArrayBuffer +// } +//} \ No newline at end of file diff --git a/src/main/scala/analysis/UtilMethods.scala b/src/main/scala/analysis/UtilMethods.scala index 278c8c4ca..6b2261c9d 100644 --- a/src/main/scala/analysis/UtilMethods.scala +++ b/src/main/scala/analysis/UtilMethods.scala @@ -83,14 +83,13 @@ def evaluateExpression(exp: Expr, constantPropResult: Map[Variable, FlatElement[ * @param exactEquality * @return */ -def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapperEqualSets, Set[BitVecLiteral]], n: CFGPosition, reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], exactEquality: Boolean = true): Set[BitVecLiteral] = { +def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapperEqualSets, Set[BitVecLiteral]], n: CFGPosition, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], exactEquality: Boolean = true): Set[BitVecLiteral] = { def apply(op: (BitVecLiteral, BitVecLiteral) => BitVecLiteral, a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = val res = for { x <- a y <- b } yield op(x, y) res - } def applySingle(op: BitVecLiteral => BitVecLiteral, a: Set[BitVecLiteral]): Set[BitVecLiteral] = { val res = for { @@ -161,24 +160,24 @@ def getUse(variable: Variable, node: CFGPosition, reachingDefs: Map[CFGPosition, out.getOrElse(variable, Set()) } -///** -// * In expressions that have accesses within a region, we need to relocate -// * the base address to the actual address using the relocation table. 
-// * MUST RELOCATE because MMM iterate to find the lowest address -// * TODO: May need to iterate over the relocation table to find the actual address -// * -// * @param address -// * @param globalOffsets -// * @return BitVecLiteral: the relocated address -// */ -//def relocatedBase(address: BitVecLiteral, globalOffsets: Map[BigInt, BigInt]): BitVecLiteral = { -// val tableAddress = globalOffsets.getOrElse(address.value, address.value) -// // this condition checks if the address is not layered and returns if it is not -// if (tableAddress != address.value && !globalOffsets.contains(tableAddress)) { -// return address -// } -// BitVecLiteral(tableAddress, address.size) -//} +/** + * In expressions that have accesses within a region, we need to relocate + * the base address to the actual address using the relocation table. + * MUST RELOCATE because MMM iterate to find the lowest address + * TODO: May need to iterate over the relocation table to find the actual address + * + * @param address + * @param globalOffsets + * @return BitVecLiteral: the relocated address + */ +def relocatedBase(address: BitVecLiteral, globalOffsets: Map[BigInt, BigInt]): BitVecLiteral = { + val tableAddress = globalOffsets.getOrElse(address.value, address.value) + // this condition checks if the address is not layered and returns if it is not + if (tableAddress != address.value && !globalOffsets.contains(tableAddress)) { + return address + } + BitVecLiteral(tableAddress, address.size) +} def unwrapExpr(expr: Expr): Set[Expr] = { var buffers: Set[Expr] = Set() diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index e68eaf58b..810e00867 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -495,7 +495,7 @@ object IRTransform { // do reachability analysis // also need a bit in the IR where it creates separate files def splitThreads(program: Program, - pointsTo: Map[RegisterVariableWrapper, Set[RegisterVariableWrapper | MemoryRegion]], + pointsTo: Map[RegisterWrapperPartialEquality, Set[RegisterWrapperPartialEquality | MemoryRegion]], regionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])] ): Unit = { @@ -510,7 +510,7 @@ object IRTransform { // look up R2 value using points to results val R2 = Register("R2", 64) val b = reachingDefs(d) - val R2Wrapper = RegisterVariableWrapper(R2, getDefinition(R2, d, reachingDefs)) + val R2Wrapper = RegisterWrapperPartialEquality(R2, getDefinition(R2, d, reachingDefs)) val threadTargets = pointsTo(R2Wrapper) if (threadTargets.size > 1) { @@ -729,8 +729,8 @@ object StaticAnalysis { mmm.logRegions() Logger.info("[!] Injecting regions") - val regionInjector = RegionInjector(domain, IRProgram, constPropResultWithSSA, mmm, reachingDefinitionsAnalysisResults, globalOffsets) - regionInjector.nodeVisitor() +// val regionInjector = RegionInjector(domain, IRProgram, constPropResultWithSSA, mmm, reachingDefinitionsAnalysisResults, globalOffsets) +// regionInjector.nodeVisitor() Logger.info("[!] 
Running Steensgaard") val steensgaardSolver = InterprocSteensgaardAnalysis(IRProgram, constPropResultWithSSA, regionAccessesAnalysisResults, mmm, reachingDefinitionsAnalysisResults, globalOffsets) From 6e1b51dac5aa732503c128f2b519a74bbc0089cc Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Fri, 23 Aug 2024 15:24:40 +1000 Subject: [PATCH 038/104] Fixed MRA loop bug --- .../scala/analysis/MemoryRegionAnalysis.scala | 30 ++++++++++++++----- src/main/scala/util/RunUtils.scala | 6 ++-- 2 files changed, 26 insertions(+), 10 deletions(-) diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index 553c4bd82..64446f71e 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -99,7 +99,7 @@ trait MemoryRegionAnalysis(val program: Program, var reducedRegions = Set.empty[MemoryRegion] if (depthMap.contains(n)) { if (depthMap(n) > maxDepth) { - depthMap += (n -> 0) + //depthMap += (n -> 0) return reducedRegions } } else { @@ -196,12 +196,15 @@ trait MemoryRegionAnalysis(val program: Program, case reg: Register if spList.contains(reg) => regionsToReturn.addAll(Set(poolMaster(BitVecLiteral(0, 64), IRWalk.procedure(n)))) case _ => - for (elem <- evaluateExpressionWithSSA(variable, constantProp(n), n, reachingDefs, exactMatch)) { + val ssaEval = evaluateExpressionWithSSA(variable, constantProp(n), n, reachingDefs, exactMatch) + for (elem <- ssaEval) { elem match { case b: BitVecLiteral => regionsToReturn.addAll(eval(b, env, n)) - case _ => reducibleVariable(variable, n) } } + if (ssaEval.isEmpty) { + regionsToReturn.addAll(reducibleVariable(variable, n)) + } } case memoryLoad: MemoryLoad => regionsToReturn.addAll(eval(memoryLoad.index, env, n)) @@ -218,9 +221,8 @@ trait MemoryRegionAnalysis(val program: Program, regionsToReturn.addAll(eval(signExtend.body, env, n)) case unaryExpr: UnaryExpr => regionsToReturn.addAll(eval(unaryExpr.arg, env, n)) - case memoryStore: MemoryAssign => - regionsToReturn.addAll(eval(memoryStore.index, env, n) ++ eval(memoryStore.value, env, n)) - case memory: Memory => + case uninterpretedFunction: UninterpretedFunction => + uninterpretedFunction.params.foreach(unExpr => regionsToReturn.addAll(eval(unExpr, env, n))) } regionsToReturn.toSet } @@ -258,7 +260,7 @@ trait MemoryRegionAnalysis(val program: Program, } } case memAssign: MemoryAssign => - val result = eval(memAssign.index, m, cmd) + val result = eval(memAssign.index, m, cmd) ++ eval(memAssign.value, m, cmd) m = regionLattice.lub(m, result) case localAssign: Assign => stackDetection(localAssign) @@ -300,6 +302,20 @@ class MemoryRegionAnalysisSolver( } } +/** + * + * @param program + * @param globals + * @param globalOffsets + * @param subroutines + * @param constantProp + * @param ANRResult + * @param RNAResult + * @param regionAccesses + * @param reachingDefs + * @param maxDepth: Used in a case of a loop unfolding for MRA purposes only because addresses created in a loop could be infinite + * @param exactMatch: If true, SSA variables are matched by matching exact set of definitions. Otherwise, loose match by overlapping sets is used + */ class InterprocMemoryRegionAnalysisSolver( program: Program, globals: Map[BigInt, String], diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 015ee2b81..a7e4c162a 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -729,12 +729,12 @@ object StaticAnalysis { Logger.info("[!] 
Running MMM") val mmm = MemoryModelMap() - mmm.convertMemoryRegions(mraResult, mergedSubroutines, globalOffsets, mraSolver.procedureToSharedRegions) + mmm.convertMemoryRegions(mraResult, mergedSubroutines, globalOffsets, globalAddresses, mraSolver.procedureToSharedRegions) mmm.logRegions() Logger.info("[!] Injecting regions") -// val regionInjector = RegionInjector(domain, IRProgram, constPropResultWithSSA, mmm, reachingDefinitionsAnalysisResults, globalOffsets) -// regionInjector.nodeVisitor() + val regionInjector = RegionInjector(domain, IRProgram, constPropResultWithSSA, mmm, reachingDefinitionsAnalysisResults, globalOffsets) + regionInjector.nodeVisitor() Logger.info("[!] Running Steensgaard") val steensgaardSolver = InterprocSteensgaardAnalysis(IRProgram, constPropResultWithSSA, regionAccessesAnalysisResults, mmm, reachingDefinitionsAnalysisResults, globalOffsets) From 745245d4994fddc2a3c998e2db923d3b61cc0c9d Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Fri, 23 Aug 2024 15:43:41 +1000 Subject: [PATCH 039/104] ReEnabled shared regions --- .../InterprocSteensgaardAnalysis.scala | 1 - .../scala/analysis/MemoryRegionAnalysis.scala | 32 ++++++++--------- .../scala/analysis/RegToMemAnalysis.scala | 34 +++++++++---------- src/main/scala/util/RunUtils.scala | 7 ++-- 4 files changed, 35 insertions(+), 39 deletions(-) diff --git a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala index f5abe7196..7fcf99ca2 100644 --- a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala +++ b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala @@ -49,7 +49,6 @@ case class RegisterWrapperEqualSets(variable: Variable, assigns: Set[Assign]) ex class InterprocSteensgaardAnalysis( program: Program, constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], - regionAccesses: Map[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]]], mmm: MemoryModelMap, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], globalOffsets: Map[BigInt, BigInt]) extends Analysis[Any] { diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index 64446f71e..1caf921d4 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -15,7 +15,7 @@ trait MemoryRegionAnalysis(val program: Program, val constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], val ANRResult: Map[CFGPosition, Set[Variable]], val RNAResult: Map[CFGPosition, Set[Variable]], - val regionAccesses: Map[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]]], + val regionAccesses: Map[CFGPosition, Map[RegisterWrapperPartialEquality, FlatElement[Expr]]], val reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], val maxDepth: Int, val exactMatch: Boolean) { @@ -239,19 +239,19 @@ trait MemoryRegionAnalysis(val program: Program, val RNA = RNAResult(program.procedures.filter(fn => fn == directCall.target).head) val parameters = RNA.intersect(ANR) // TODO: Re-enable when ReachingDef has interprocedural option -// val ctx = regionAccesses(cmd) -// for (elem <- parameters) { -// if (ctx.contains(RegisterVariableWrapper(elem, getUse(elem, cmd.data, reachingDefs)))) { -// ctx(RegisterVariableWrapper(elem, getUse(elem, cmd.data, reachingDefs))) match { -// case FlatEl(al) => -// val regions = eval(al, s, cmd) -// //val targetMap = 
stackMap(directCall.target) -// //cfg.funEntries.filter(fn => fn.data == directCall.target).head -// procedureToSharedRegions.getOrElseUpdate(directCall.target, mutable.Set.empty).addAll(regions) -// registerToRegions.getOrElseUpdate(RegisterVariableWrapper(elem, getUse(elem, cmd.data, reachingDefs)), mutable.Set.empty).addAll(regions) -// } -// } -// } + val ctx = regionAccesses(cmd) + for (elem <- parameters) { + if (ctx.contains(RegisterWrapperPartialEquality(elem, getUse(elem, cmd, reachingDefs)))) { + ctx(RegisterWrapperPartialEquality(elem, getUse(elem, cmd, reachingDefs))) match { + case FlatEl(al) => + val regions = eval(al, s, cmd) + //val targetMap = stackMap(directCall.target) + //cfg.funEntries.filter(fn => fn.data == directCall.target).head + procedureToSharedRegions.getOrElseUpdate(directCall.target, mutable.Set.empty).addAll(regions) + registerToRegions.getOrElseUpdate(RegisterWrapperPartialEquality(elem, getUse(elem, cmd, reachingDefs)), mutable.Set.empty).addAll(regions) + } + } + } if (directCall.target.name == "malloc") { for (elem <- evaluateExpressionWithSSA(mallocVariable, constantProp(n), n, reachingDefs, exactMatch)) { elem match { @@ -283,7 +283,7 @@ class MemoryRegionAnalysisSolver( constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], ANRResult: Map[CFGPosition, Set[Variable]], RNAResult: Map[CFGPosition, Set[Variable]], - regionAccesses: Map[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]]], + regionAccesses: Map[CFGPosition, Map[RegisterWrapperPartialEquality, FlatElement[Expr]]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], maxDepth: Int, exactMatch: Boolean = true @@ -324,7 +324,7 @@ class InterprocMemoryRegionAnalysisSolver( constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], ANRResult: Map[CFGPosition, Set[Variable]], RNAResult: Map[CFGPosition, Set[Variable]], - regionAccesses: Map[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]]], + regionAccesses: Map[CFGPosition, Map[RegisterWrapperPartialEquality, FlatElement[Expr]]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], maxDepth: Int, exactMatch: Boolean = false diff --git a/src/main/scala/analysis/RegToMemAnalysis.scala b/src/main/scala/analysis/RegToMemAnalysis.scala index 1b6f9b51c..1b30c6390 100644 --- a/src/main/scala/analysis/RegToMemAnalysis.scala +++ b/src/main/scala/analysis/RegToMemAnalysis.scala @@ -15,27 +15,27 @@ import scala.collection.immutable * * Both in which constant propagation mark as TOP which is not useful. 
*/ -trait RegionAccessesAnalysis(cfg: ProgramCfg, constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]) { +trait RegionAccessesAnalysis(program: Program, constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]) { val mapLattice: MapLattice[RegisterWrapperPartialEquality, FlatElement[Expr], FlatLattice[Expr]] = MapLattice(FlatLattice[_root_.ir.Expr]()) - val lattice: MapLattice[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]], MapLattice[RegisterWrapperPartialEquality, FlatElement[Expr], FlatLattice[Expr]]] = MapLattice(mapLattice) + val lattice: MapLattice[CFGPosition, Map[RegisterWrapperPartialEquality, FlatElement[Expr]], MapLattice[RegisterWrapperPartialEquality, FlatElement[Expr], FlatLattice[Expr]]] = MapLattice(mapLattice) - val domain: Set[CfgNode] = cfg.nodes.toSet + val domain: Set[CFGPosition] = Set.empty ++ program - val first: Set[CfgNode] = Set(cfg.startNode) + val first: Set[CFGPosition] = Set.empty ++ program.procedures /** Default implementation of eval. */ - def eval(cmd: CfgCommandNode, constants: Map[Variable, FlatElement[BitVecLiteral]], s: Map[RegisterWrapperPartialEquality, FlatElement[Expr]]): Map[RegisterWrapperPartialEquality, FlatElement[Expr]] = { - cmd.data match { + def eval(cmd: Command, constants: Map[Variable, FlatElement[BitVecLiteral]], s: Map[RegisterWrapperPartialEquality, FlatElement[Expr]]): Map[RegisterWrapperPartialEquality, FlatElement[Expr]] = { + cmd match { case assign: Assign => assign.rhs match { case memoryLoad: MemoryLoad => - s + (RegisterWrapperPartialEquality(assign.lhs, getDefinition(assign.lhs, cmd.data, reachingDefs)) -> FlatEl(memoryLoad)) + s + (RegisterWrapperPartialEquality(assign.lhs, getDefinition(assign.lhs, cmd, reachingDefs)) -> FlatEl(memoryLoad)) case binaryExpr: BinaryExpr => if (evaluateExpression(binaryExpr.arg1, constants).isEmpty) { // approximates Base + Offset - s + (RegisterWrapperPartialEquality(assign.lhs, getDefinition(assign.lhs, cmd.data, reachingDefs)) -> FlatEl(binaryExpr)) + s + (RegisterWrapperPartialEquality(assign.lhs, getDefinition(assign.lhs, cmd, reachingDefs)) -> FlatEl(binaryExpr)) } else { s } @@ -48,23 +48,23 @@ trait RegionAccessesAnalysis(cfg: ProgramCfg, constantProp: Map[CFGPosition, Map /** Transfer function for state lattice elements. */ - def localTransfer(n: CfgNode, s: Map[RegisterWrapperPartialEquality, FlatElement[Expr]]): Map[RegisterWrapperPartialEquality, FlatElement[Expr]] = n match { - case cmd: CfgCommandNode => - eval(cmd, constantProp(cmd.data), s) + def localTransfer(n: CFGPosition, s: Map[RegisterWrapperPartialEquality, FlatElement[Expr]]): Map[RegisterWrapperPartialEquality, FlatElement[Expr]] = n match { + case cmd: Command => + eval(cmd, constantProp(cmd), s) case _ => s // ignore other kinds of nodes } /** Transfer function for state lattice elements. 
*/ - def transfer(n: CfgNode, s: Map[RegisterWrapperPartialEquality, FlatElement[Expr]]): Map[RegisterWrapperPartialEquality, FlatElement[Expr]] = localTransfer(n, s) + def transfer(n: CFGPosition, s: Map[RegisterWrapperPartialEquality, FlatElement[Expr]]): Map[RegisterWrapperPartialEquality, FlatElement[Expr]] = localTransfer(n, s) } class RegionAccessesAnalysisSolver( - cfg: ProgramCfg, + program: Program, constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], - ) extends RegionAccessesAnalysis(cfg, constantProp, reachingDefs) - with InterproceduralForwardDependencies - with Analysis[Map[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]]]] - with SimpleWorklistFixpointSolver[CfgNode, Map[RegisterWrapperPartialEquality, FlatElement[Expr]], MapLattice[RegisterWrapperPartialEquality, FlatElement[Expr], FlatLattice[Expr]]] { + ) extends RegionAccessesAnalysis(program, constantProp, reachingDefs) + with IRInterproceduralForwardDependencies + with Analysis[Map[CFGPosition, Map[RegisterWrapperPartialEquality, FlatElement[Expr]]]] + with SimpleWorklistFixpointSolver[CFGPosition, Map[RegisterWrapperPartialEquality, FlatElement[Expr]], MapLattice[RegisterWrapperPartialEquality, FlatElement[Expr], FlatLattice[Expr]]] { } \ No newline at end of file diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index a7e4c162a..5f1963eab 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -709,14 +709,11 @@ object StaticAnalysis { Logger.info("[!] Running RegToMemAnalysisSolver") - val regionAccessesAnalysisSolver = RegionAccessesAnalysisSolver(cfg, constPropResult, reachingDefinitionsAnalysisResults) + val regionAccessesAnalysisSolver = RegionAccessesAnalysisSolver(IRProgram, constPropResult, reachingDefinitionsAnalysisResults) val regionAccessesAnalysisResults = regionAccessesAnalysisSolver.analyze() println(s"Finished region accesses at ${(System.nanoTime() - before) / 1000000} ms") - config.analysisDotPath.foreach(s => writeToFile(cfg.toDot(Output.labeler(regionAccessesAnalysisResults, true), Output.dotIder), s"${s}_RegTo$iteration.dot")) - config.analysisResultsPath.foreach(s => writeToFile(printAnalysisResults(cfg, regionAccessesAnalysisResults, iteration), s"${s}_RegTo$iteration.txt")) - Logger.info("[!] Running Constant Propagation with SSA") val constPropSolverWithSSA = ConstantPropagationSolverWithSSA(IRProgram, reachingDefinitionsAnalysisResults) val constPropResultWithSSA = constPropSolverWithSSA.analyze() @@ -737,7 +734,7 @@ object StaticAnalysis { regionInjector.nodeVisitor() Logger.info("[!] 
Running Steensgaard") - val steensgaardSolver = InterprocSteensgaardAnalysis(IRProgram, constPropResultWithSSA, regionAccessesAnalysisResults, mmm, reachingDefinitionsAnalysisResults, globalOffsets) + val steensgaardSolver = InterprocSteensgaardAnalysis(IRProgram, constPropResultWithSSA, mmm, reachingDefinitionsAnalysisResults, globalOffsets) steensgaardSolver.analyze() val steensgaardResults = steensgaardSolver.pointsTo() val memoryRegionContents = steensgaardSolver.getMemoryRegionContents From 78df35403492f65c828e8577ae5c596d4f4732c1 Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 26 Aug 2024 10:54:07 +1000 Subject: [PATCH 040/104] debugged internal offset tracking --- src/main/scala/analysis/DSAUtility.scala | 50 +++++++++++++++++------- src/test/scala/LocalTest.scala | 8 +++- 2 files changed, 41 insertions(+), 17 deletions(-) diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index bf67cf7a2..ae1b5587d 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -547,15 +547,17 @@ class DSG(val proc: Procedure, if outgoing.size == 1 then collapsedCell._pointee = Some(outgoing.head) else if outgoing.size > 1 then - var internal = outgoing.head._2 - val result = outgoing.tail.foldLeft(outgoing.head._1){ +// var internal = outgoing.head._2 + val result = outgoing.tail.foldLeft(adjust(outgoing.head)){ (result, pointee) => - val cell = pointee._1 - val pointeeInternal = pointee._2 - internal = internal.max(pointeeInternal) - mergeCells(result, cell) +// val cell = pointee._1 +// val pointeeInternal = pointee._2 +// internal = internal.max(pointeeInternal) + mergeCells(result, adjust(pointee)) } - collapsedCell._pointee = Some(Slice(result, internal)) + + + collapsedCell._pointee = Some(deadjust(result)) } solver.unify(node1.term, resultNode.term, 0) @@ -577,6 +579,15 @@ class DSG(val proc: Procedure, val internal = slice.internalOffset adjust(cell, internal + offset) + def deadjust(cell: DSC) : Slice = + val node = cell.node.get + val offset = cell.offset + selfCollapse(node) + val newCell = node.getCell(offset) + assert(offset >= newCell.offset) + Slice(newCell, offset - newCell.offset) + + private def isFormal(pos: CFGPosition, variable: Variable): Boolean = !reachingDefs(pos).contains(variable) @@ -863,14 +874,23 @@ case class DSC(node: Option[DSN], offset: BigInt) val node = DSN(Some(this.node.get.graph.get)) _pointee = Some(Slice(node.cells(0), 0)) else - val slice = _pointee.get - var node = slice.node - val graph = node.graph.get - val link = graph.solver.find(node.term) - node = link._1.asInstanceOf[Derm].node - val offset = link._2 - val cell = node.addCell(offset + slice.cell.offset, slice.cell.largestAccessedSize) - _pointee = Some(Slice(cell, slice.internalOffset)) + +// val node = cell.node.get +// val offset = cell.offset +// val parent: Field = find(node) +// parent.node.addCell(cell.offset + parent.offset, cell.largestAccessedSize) + + +// val slice = _pointee.get +// var node = slice.node + val graph = _pointee.get.node.graph.get + val resolvedPointee = graph.find(graph.adjust(_pointee.get)) + +// val link = graph.solver.find(node.term) +// node = link._1.asInstanceOf[Derm].node +// val offset = link._2 +// val cell = node.addCell(offset + slice.cell.offset, slice.cell.largestAccessedSize) + _pointee = Some(graph.deadjust(resolvedPointee)) _pointee.get def growSize(size: BigInt): Boolean = diff --git a/src/test/scala/LocalTest.scala b/src/test/scala/LocalTest.scala index 
2d7e67d00..73f665a94 100644 --- a/src/test/scala/LocalTest.scala +++ b/src/test/scala/LocalTest.scala @@ -249,8 +249,12 @@ class LocalTest extends AnyFunSuite, TestUtil { assert(stack48.node.get.allocationRegions.head.asInstanceOf[HeapLocation].size == 8) assert(dsg.adjust(stack48.getPointee).equals(stack40)) assert(dsg.adjust(stack48.getPointee).equals(stack56)) - assert(stack24.equals(stack40)) - assert(stack40.offset == 1) // todo check + println(dsg.find(dsg.stackMapping(24).cells(0)).getPointee) + println(dsg.find(dsg.stackMapping(40).cells(0)).getPointee) + assert(dsg.deadjust(stack24).cell.equals(dsg.deadjust(stack40).cell)) // these are pointees, they should point to the same cell at different offsets + assert(dsg.deadjust(stack40).internalOffset == 1) // todo check + assert(dsg.deadjust(stack24).internalOffset == 0) + assert(dsg.deadjust(stack24).offset == 0) } // // test("interproc pointer arithmetic main") { From 1b47aa9766f001d4a9dd7c940b9325b2d162e8eb Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 26 Aug 2024 16:57:18 +1000 Subject: [PATCH 041/104] tests --- src/main/scala/analysis/DSA.scala | 298 +++--- src/main/scala/analysis/DSAUtility.scala | 210 +--- src/test/scala/LocalTest.scala | 1247 +++++++++++----------- 3 files changed, 813 insertions(+), 942 deletions(-) diff --git a/src/main/scala/analysis/DSA.scala b/src/main/scala/analysis/DSA.scala index c7569c2bf..cd4ece1c5 100644 --- a/src/main/scala/analysis/DSA.scala +++ b/src/main/scala/analysis/DSA.scala @@ -69,154 +69,162 @@ class DSA(program: Program, val dsg = Local(proc, symResults, constProp, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params).analyze() locals.update(proc, dsg) -// bu.update(proc, dsg.cloneSelf()) + bu.update(proc, dsg.cloneSelf()) ) Map() -// val leafNodes = findLeaf(program.mainProcedure) -// -// leafNodes.foreach( -// proc => -// assert(locals(proc).callsites.isEmpty) -// visited += proc -// val preds : Set[Procedure] = CallGraph.pred(proc) -// queue.enqueueAll(CallGraph.pred(proc).diff(visited).intersect(domain)) -// ) -// -// // bottom up phase -// while queue.nonEmpty do -// val proc = queue.dequeue() -// visited += proc -// queue.enqueueAll(CallGraph.pred(proc).diff(visited)) -// val buGraph = bu(proc) -// -// buGraph.callsites.foreach( -// callSite => -// val callee = callSite.proc -// val calleeGraph = locals(callee) //.cloneSelf() -// assert(buGraph.globalMapping.keySet.equals(calleeGraph.globalMapping.keySet)) -// assert(calleeGraph.formals.keySet.diff(ignoreRegisters).equals(callSite.paramCells.keySet)) -// -// calleeGraph.globalMapping.foreach { -// case (range: AddressRange, Field(node, offset)) => -// node.cloneNode(calleeGraph, buGraph) -// } -// -// calleeGraph.formals.foreach{ -// case (variable: Variable, slice: Slice) if !ignoreRegisters.contains(variable) => -// assert(callSite.paramCells.contains(variable)) -// val node = slice.node -// node.cloneNode(calleeGraph, buGraph) -// case _ => -// } -// -// assert(writesTo(callee).equals(callSite.returnCells.keySet)) -// writesTo(callee).foreach( -// reg => -// assert(callSite.returnCells.contains(reg)) -// val returnCells = calleeGraph.getCells(end(callee), reg) -// assert(returnCells.nonEmpty) -// returnCells.foreach{ -// case slice: Slice => -// val node = slice.node -// node.cloneNode(calleeGraph, buGraph) -// } -// ) -// -//// assert(calleeGraph.formals.isEmpty || buGraph.varToCell(begin(callee)).equals(calleeGraph.formals)) -// val globalNodes: mutable.Map[Int, DSN] = mutable.Map() -// 
calleeGraph.globalMapping.foreach { -// case (range: AddressRange, Field(node: DSN, offset: BigInt)) => -// buGraph.mergeCells(buGraph.globalMapping(range)._1.getCell(buGraph.globalMapping(range)._2), -// node.getCell(offset)) -// } -// -// buGraph.varToCell.getOrElse(begin(callee), Map.empty).foreach{ -// case (variable: Variable, formal) if !ignoreRegisters.contains(variable) => -// buGraph.mergeCells(buGraph.adjust(formal), buGraph.adjust(callSite.paramCells(variable))) -// case _ => -// } -// writesTo(callee).foreach( -// reg => -// val returnCells = buGraph.getCells(end(callee), reg) -// // assert(returnCells.nonEmpty) -// val result: DSC = returnCells.foldLeft(buGraph.adjust(callSite.returnCells(reg))){ -// // -// case (c: DSC, ret) => -// buGraph.mergeCells(c, buGraph.adjust(ret)) -// } -// ) -// ) -// buGraph.collectNodes -// // bottom up phase finished -// // clone bu graphs to top-down graphs -// domain.foreach( -// proc => -// td.update(proc, bu(proc).cloneSelf()) -// ) -// -// queue.enqueue(program.mainProcedure) -// visited = Set() -// -// -// // top-down phase -// while queue.nonEmpty do -// val proc = queue.dequeue() -// visited += proc -// queue.enqueueAll(CallGraph.succ(proc).diff(visited)) -// val callersGraph = td(proc) -// callersGraph.callsites.foreach( -// callSite => -// val callee = callSite.proc -// val calleesGraph = td(callee) -// assert(callersGraph.globalMapping.keySet.equals(calleesGraph.globalMapping.keySet)) -// -// callersGraph.globalMapping.foreach { -// case (range: AddressRange, Field(node, offset)) => -// node.cloneNode(callersGraph, calleesGraph) -// } -// -// -// callSite.paramCells.foreach{ -// case (variable: Variable, slice: Slice) => -// val node = slice.node -// node.cloneNode(callersGraph, calleesGraph) -// } -// -// callSite.returnCells.foreach{ -// case (variable: Variable, slice: Slice) => -// val node = slice.node -// node.cloneNode(callersGraph, callersGraph) -// } -// -// -// callersGraph.globalMapping.foreach { -// case (range: AddressRange, Field(node, internal)) => -// calleesGraph.mergeCells(calleesGraph.globalMapping(range)._1.getCell(calleesGraph.globalMapping(range)._2), -// node.getCell(internal)) -// } -// -// callSite.paramCells.keySet.foreach( -// variable => -// val paramCells = calleesGraph.getCells(callSite.call, variable) -// paramCells.foldLeft(calleesGraph.adjust(calleesGraph.formals(variable))) { -// (cell, slice) => -// calleesGraph.mergeCells(calleesGraph.adjust(slice), cell) -// } -// ) -// -// calleesGraph.varToCell.getOrElse(callSite.call, Map.empty).foreach{ -// case (variable: Variable, cell: Slice) => -// val returnCells = calleesGraph.getCells(end(callee), variable) -// returnCells.foldLeft(calleesGraph.adjust(cell)){ -// case (c: DSC, retCell: Slice) => -// calleesGraph.mergeCells(c, calleesGraph.adjust(retCell)) -// } -// case _ => ??? 
-// } -// ) -// callersGraph.collectNodes -// td.toMap + val leafNodes = findLeaf(program.mainProcedure) + + leafNodes.foreach( + proc => + assert(locals(proc).callsites.isEmpty) + visited += proc + val preds : Set[Procedure] = CallGraph.pred(proc) + queue.enqueueAll(CallGraph.pred(proc).diff(visited).intersect(domain)) + ) + + // bottom up phase + while queue.nonEmpty do + val proc = queue.dequeue() + visited += proc + queue.enqueueAll(CallGraph.pred(proc).diff(visited)) + val buGraph = bu(proc) + + buGraph.callsites.foreach( + callSite => + val callee = callSite.proc + val calleeGraph = locals(callee) //.cloneSelf() + assert(buGraph.globalMapping.keySet.equals(calleeGraph.globalMapping.keySet)) + assert(calleeGraph.formals.keySet.diff(ignoreRegisters).equals(callSite.paramCells.keySet)) + + calleeGraph.globalMapping.foreach { + case (range: AddressRange, Field(node, offset)) => + val newNode = calleeGraph.find(node).node + newNode.cloneNode(calleeGraph, buGraph) + } + + calleeGraph.formals.foreach{ + case (variable: Variable, slice: Slice) if !ignoreRegisters.contains(variable) => + assert(callSite.paramCells.contains(variable)) + val node = calleeGraph.find(slice).node + node.cloneNode(calleeGraph, buGraph) + case _ => + } + + assert(writesTo(callee).equals(callSite.returnCells.keySet)) + writesTo(callee).foreach( + reg => + assert(callSite.returnCells.contains(reg)) + val returnCells = calleeGraph.getCells(end(callee), reg).map(calleeGraph.find) + assert(returnCells.nonEmpty) + returnCells.foreach{ + case slice: Slice => + val node = calleeGraph.find(slice).node + node.cloneNode(calleeGraph, buGraph) + } + ) + +// assert(calleeGraph.formals.isEmpty || buGraph.varToCell(begin(callee)).equals(calleeGraph.formals)) + val globalNodes: mutable.Map[Int, DSN] = mutable.Map() + calleeGraph.globalMapping.foreach { + case (range: AddressRange, Field(node: DSN, offset: BigInt)) => + val field = calleeGraph.find(node) + buGraph.mergeCells(buGraph.globalMapping(range)._1.getCell(buGraph.globalMapping(range)._2), + field.node.getCell(field.offset + offset)) + } + + buGraph.varToCell.getOrElse(begin(callee), Map.empty).foreach{ + case (variable: Variable, formal) if !ignoreRegisters.contains(variable) => + val test = buGraph.mergeCells(buGraph.adjust(formal), buGraph.adjust(callSite.paramCells(variable))) + test + case _ => + } + writesTo(callee).foreach( + reg => + val returnCells = buGraph.getCells(end(callee), reg) + // assert(returnCells.nonEmpty) + val result: DSC = returnCells.foldLeft(buGraph.adjust(callSite.returnCells(reg))){ + // + case (c: DSC, ret) => + val test = buGraph.mergeCells(c, buGraph.adjust(ret)) + test + } + ) + ) + buGraph.collectNodes + // bottom up phase finished + // clone bu graphs to top-down graphs + domain.foreach( + proc => + td.update(proc, bu(proc).cloneSelf()) + ) + + queue.enqueue(program.mainProcedure) + visited = Set() + + + // top-down phase + while queue.nonEmpty do + val proc = queue.dequeue() + visited += proc + queue.enqueueAll(CallGraph.succ(proc).diff(visited)) + val callersGraph = td(proc) + callersGraph.callsites.foreach( + callSite => + val callee = callSite.proc + val calleesGraph = td(callee) + assert(callersGraph.globalMapping.keySet.equals(calleesGraph.globalMapping.keySet)) + + callersGraph.globalMapping.foreach { + case (range: AddressRange, Field(oldNode, offset)) => + val node = callersGraph.find(oldNode).node + node.cloneNode(callersGraph, calleesGraph) + } + + + callSite.paramCells.foreach{ + case (variable: Variable, slice: Slice) => + val 
node = callersGraph.find(slice).node + node.cloneNode(callersGraph, calleesGraph) + } + + callSite.returnCells.foreach{ + case (variable: Variable, slice: Slice) => + val node = callersGraph.find(slice).node + node.cloneNode(callersGraph, callersGraph) + } + + + callersGraph.globalMapping.foreach { + case (range: AddressRange, Field(oldNode, internal)) => +// val node = callersGraph + val field = callersGraph.find(oldNode) + calleesGraph.mergeCells(calleesGraph.globalMapping(range)._1.getCell(calleesGraph.globalMapping(range)._2), + field.node.getCell(field.offset + internal)) + } + + callSite.paramCells.keySet.foreach( + variable => + val paramCells = calleesGraph.getCells(callSite.call, variable) // wrong param offset + paramCells.foldLeft(calleesGraph.adjust(calleesGraph.formals(variable))) { + (cell, slice) => + calleesGraph.mergeCells(calleesGraph.adjust(slice), cell) + } + ) + + calleesGraph.varToCell.getOrElse(callSite.call, Map.empty).foreach{ + case (variable: Variable, oldSlice: Slice) => + val slice = callersGraph.find(oldSlice) + val returnCells = calleesGraph.getCells(end(callee), variable) + returnCells.foldLeft(calleesGraph.adjust(slice)){ + case (c: DSC, retCell: Slice) => + calleesGraph.mergeCells(c, calleesGraph.adjust(retCell)) + } + case _ => ??? + } + ) + callersGraph.collectNodes + td.toMap } } diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index ae1b5587d..55bdda63d 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -183,81 +183,6 @@ class DSG(val proc: Procedure, } global -// private def replaceInEV(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = -// varToCell.foreach( -// (pos, m) => -// m.foreach { -// case (variable, slice) => -// if slice.cell.equals(oldCell) then -// m.update(variable, Slice(newCell, slice.internalOffset + internalOffsetChange)) -// } -// ) -// -// formals.foreach{ -// case (variable, slice) => -// if slice.cell.equals(oldCell) then -// formals.update(variable, Slice(newCell, slice.internalOffset + internalOffsetChange)) -// } -// -// private def replaceInPointTo(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = -// pointTo.foreach { -// case (pointer, slice: Slice) => -// if slice.cell.equals(oldCell) then -// pointTo.update(pointer, Slice(newCell, slice.internalOffset + internalOffsetChange)) -// } -// -// private def replaceInGlobals(oldCell: DSC, newCell: DSC) = -// if oldCell.node.isDefined then -// globalMapping.foreach { -// case (key, Field(node, offset)) => -// if node.equals(oldCell.node.get) then -// globalMapping.update(key, Field(newCell.node.get, offset)) -// } -// -// private def replaceInStack(oldCell: DSC, newCell: DSC) = -// if oldCell.node.isDefined then -// stackMapping.foreach{ -// case (offset, node) => -// if node.equals(oldCell.node.get) then -// stackMapping.update(offset, newCell.node.get) -// } -// -// private def replaceInCallSites(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = -// callsites.foreach( -// callSite => -// callSite.returnCells.foreach{ -// case (variable: Variable, slice: Slice) => -// if slice.cell.equals(oldCell) then -// callSite.returnCells.update(variable, Slice(newCell, slice.internalOffset + internalOffsetChange)) -// } -// -// callSite.paramCells.foreach{ -// case (variable: Variable, slice: Slice) => -// if slice.cell.equals(oldCell) then -// callSite.paramCells.update(variable, Slice(newCell, slice.internalOffset + internalOffsetChange)) -// } -// ) - - - // 
replaces an old cell with a new cell in all the mappings and updates their slice offset if applicable - // This is inefficient looking to replace it with a union-find approach -// def replace(oldCell: DSC, newCell: DSC, internalOffsetChange: BigInt) = -// replaceInEV(oldCell, newCell, internalOffsetChange) -// replaceInPointTo(oldCell, newCell, internalOffsetChange) -// replaceInGlobals(oldCell, newCell) -// replaceInStack(oldCell, newCell) -// replaceInCallSites(oldCell, newCell, internalOffsetChange) - -// def getPointee(cell: DSC): Slice = -// if !pointTo.contains(cell) then -// val node = DSN(Some(this)) -// pointTo.update(cell, Slice(node.cells(0), 0)) -// pointTo(cell) -// -// def getPointeeAdjusted(cell:DSC): DSC = -// val pointee = getPointee(cell) -// adjust(pointee) - def getCells(pos: CFGPosition, arg: Variable): Set[Slice] = if reachingDefs(pos).contains(arg) then reachingDefs(pos)(arg).foldLeft(Set[Slice]()) { @@ -304,9 +229,7 @@ class DSG(val proc: Procedure, val cell = field._2 val pointee = cell._pointee if pointee.isDefined && adjust(cell.getPointee) == cell then - // cell._pointee = Some(Slice(collapedCell, 0)) pointToItself = true - // collapedCell._pointee = Some(Slice(collapedCell, 0)) c else if pointee.isDefined then val slice = cell.getPointee @@ -338,10 +261,6 @@ class DSG(val proc: Procedure, else assert(find(n).node.collapsed) find(n).node -// node.cells.clear() -// node.cells.addOne(0, collapedCell) -// if cell.node.isDefined then -// node.cells(0)._pointee = Some(Slice(cell, pointeeInternalOffset)) /** * this function merges all the overlapping cells in the given node @@ -374,14 +293,11 @@ class DSG(val proc: Procedure, val slice1 = cell1.getPointee val slice2 = cell2.getPointee val result = mergeCells(adjust(slice1), adjust(slice2)) -// assert(pointTo(cell1)._1.equals(result)) cell1._pointee = Some(Slice(result, slice2.internalOffset.max(slice1.internalOffset))) else cell1._pointee = cell2._pointee -// cell2._pointee = None val internalOffsetChange = cell2.offset - cell1.offset cell2.node.get.cells.remove(cell2.offset) -// replace(cell2, cell1, internalOffsetChange) cell1.growSize((cell2.offset - cell1.offset) + cell2.largestAccessedSize) // might cause another collapse cell1 @@ -400,22 +316,8 @@ class DSG(val proc: Procedure, val parent: Field = find(node) parent.node.addCell(cell.offset + parent.offset, cell.largestAccessedSize) - -// val offsets = mutable.Map[DSN, BigInt]() - -// private def findOffset(current: DSN, result: DSN): BigInt = -// if current == result then -// 0 -// else -// current.offset + findOffset(current.embeddedIn.get, result) -// -// -// def resolve(cell: DSC): DSC = -// val node = cell.node.get -// val result: DSN = solver.find(Derm(node)).asInstanceOf[Derm].node -// val offset = findOffset(node, result) -// result.getCell(offset) - + def find(slice: Slice) : Slice = + deadjust(adjust(slice)) /** * merges two cells and unifies their nodes @@ -440,7 +342,6 @@ class DSG(val proc: Procedure, ne.cells(0) else if cell1.node.isEmpty then ??? // not sure how to handle this yet TODO possibly take it out of the merge? 
-// replace(cell1, cell2, 0) cell2 else if cell1.node.get.collapsed || cell2.node.get.collapsed then // a collapsed node @@ -528,17 +429,10 @@ class DSG(val proc: Procedure, val collapsedCell = resultNode.addCell(offset, largestAccess) val outgoing: Set[Slice] = cells.foldLeft(Set[Slice]()){ (set, cell) => - // replace incoming edges -// if cell.node.get.equals(node2) then -// replace(cell, collapsedCell, delta + cell.offset - offset) -// else -// assert(cell.node.get.equals(node1)) -// replace(cell, collapsedCell, cell.offset - offset) // collect outgoing edges if cell._pointee.isDefined then val pointee = cell.getPointee -// cell._pointee = None set + pointee else set @@ -547,12 +441,8 @@ class DSG(val proc: Procedure, if outgoing.size == 1 then collapsedCell._pointee = Some(outgoing.head) else if outgoing.size > 1 then -// var internal = outgoing.head._2 val result = outgoing.tail.foldLeft(adjust(outgoing.head)){ (result, pointee) => -// val cell = pointee._1 -// val pointeeInternal = pointee._2 -// internal = internal.max(pointeeInternal) mergeCells(result, adjust(pointee)) } @@ -634,11 +524,13 @@ class DSG(val proc: Procedure, def cloneSelf(): DSG = val newGraph = DSG(proc, constProp, varToSym, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params) assert(formals.size == newGraph.formals.size) + val nodes = mutable.Set[DSN]() val idToNode: mutable.Map[Int, DSN] = mutable.Map() formals.foreach{ case (variable: Variable, slice: Slice) => // assert(newGraph.formals.contains(variable)) - val node = slice.node + val node = find(slice).node + nodes.add(node) if !idToNode.contains(node.id) then val newNode = node.cloneSelf(newGraph) idToNode.update(node.id, newNode) @@ -651,9 +543,11 @@ class DSG(val proc: Procedure, if !newGraph.varToCell.contains(position) then newGraph.varToCell.update(position, mutable.Map[Variable, Slice]()) values.foreach{ - case (variable: Variable, slice: Slice) => + case (variable: Variable, s: Slice) => // assert(newGraph.varToCell(position).contains(variable)) + val slice = find(s) val node = slice.node + nodes.add(node) if !idToNode.contains(node.id) then val newNode = node.cloneSelf(newGraph) idToNode.update(node.id, newNode) @@ -662,7 +556,9 @@ class DSG(val proc: Procedure, } stackMapping.foreach{ - case (offset, node) => + case (offset, oldNode) => + val node = find(oldNode).node + nodes.add(node) assert(newGraph.stackMapping.contains(offset)) if !idToNode.contains(node.id) then val newNode = node.cloneSelf(newGraph) @@ -673,27 +569,33 @@ class DSG(val proc: Procedure, globalMapping.foreach { case (range: AddressRange, Field(node, offset)) => assert(newGraph.globalMapping.contains(range)) - if !idToNode.contains(node.id) then + val field = find(node) + nodes.add(field.node) + if !idToNode.contains(field.node.id) then val newNode = node.cloneSelf(newGraph) - idToNode.update(node.id, newNode) - newGraph.globalMapping.update(range, Field(idToNode(node.id), offset)) + idToNode.update(field.node.id, newNode) + newGraph.globalMapping.update(range, Field(idToNode(field.node.id), field.offset + offset)) } -// newGraph.pointTo.clear() -// pointTo.foreach { -// case (cell1: DSC, slice: Slice) => -// val node1 = cell1.node.get -// val node2 = slice.node -// if !idToNode.contains(node1.id) then -// val newNode1 = node1.cloneSelf(newGraph) -// idToNode.update(node1.id, newNode1) -// -// if !idToNode.contains(node2.id) then -// val newNode2 = node2.cloneSelf(newGraph) -// idToNode.update(node2.id, newNode2) -// -// 
newGraph.pointTo.update(idToNode(node1.id).cells(cell1.offset), Slice(idToNode(node2.id).cells(slice.offset), slice.internalOffset)) -// } + val queue: mutable.Queue[DSN] = mutable.Queue() + queue.addAll(nodes) + while queue.nonEmpty do + + val node = queue.dequeue() + node.cells.foreach { + case (offset: BigInt, cell: DSC) if cell._pointee.isDefined => + val id = cell.node.get.id + val pointee = find(cell.getPointee) + val pointeeId = pointee.node.id + if !idToNode.contains(pointeeId) then + queue.enqueue(pointee.node) + val newNode = pointee.node.cloneSelf(newGraph) + idToNode.update(pointeeId, newNode) + idToNode(id).cells(cell.offset)._pointee = Some(Slice(idToNode(pointeeId).cells(pointee.offset), pointee.internalOffset)) + + + case _ => + } callsites.foreach( callSite => @@ -701,14 +603,16 @@ class DSG(val proc: Procedure, newGraph.callsites.add(cs) assert(cs.paramCells.keySet.equals(callSite.paramCells.keySet)) callSite.paramCells.foreach{ - case (variable: Variable, slice: Slice) => + case (variable: Variable, oldSlice : Slice) => + val slice = find(oldSlice) assert(cs.paramCells.contains(variable)) val id = slice.node.id cs.paramCells.update(variable, Slice(idToNode(id).cells(slice.offset), slice.internalOffset)) } callSite.returnCells.foreach{ - case (variable: Variable, slice: Slice) => + case (variable: Variable, oldSlice: Slice) => + val slice = find(oldSlice) assert(cs.returnCells.contains(variable)) val id = slice.node.id cs.returnCells.update(variable, Slice(idToNode(id).cells(slice.offset), slice.internalOffset)) @@ -716,7 +620,6 @@ class DSG(val proc: Procedure, ) - newGraph.nodes.addAll(idToNode.values) newGraph @@ -819,31 +722,32 @@ class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCount val varMap = t._2 varMap.foreach{ case (variable: Variable, slice: Slice) => - if slice.node.equals(this) then + if from.find(slice).node.equals(this) then to.varToCell.update( pos, + to.varToCell.getOrElseUpdate(pos, - mutable.Map[Variable, Slice]()) ++ Map(variable -> slice) + mutable.Map[Variable, Slice]()) ++ Map(variable -> from.find(slice)) ) } ) from.formals.foreach{ case (variable: Variable, slice: Slice) => - if slice.node.equals(this) then + if from.find(slice).node.equals(this) then to.varToCell.update( begin(from.proc), to.varToCell.getOrElseUpdate(begin(from.proc), - mutable.Map[Variable, Slice]()) ++ Map(variable -> slice) + mutable.Map[Variable, Slice]()) ++ Map(variable -> from.find(slice)) ) } -// cells.foreach { -// case (offset: BigInt, cell: DSC) => -// if from.pointTo.contains(cell) then -// val pointee = from.getPointee(cell) -// pointee._1.node.get.cloneNode(from, to) -// to.pointTo.update(cell, pointee) -// } + cells.foreach { + case (offset: BigInt, cell: DSC) => + if cell._pointee.isDefined then + val pointee = cell.getPointee + pointee.node.cloneNode(from, to) +// to.pointTo.update(cell, pointee) TODO check this is not necessary + } override def equals(obj: Any): Boolean = obj match @@ -875,21 +779,9 @@ case class DSC(node: Option[DSN], offset: BigInt) _pointee = Some(Slice(node.cells(0), 0)) else -// val node = cell.node.get -// val offset = cell.offset -// val parent: Field = find(node) -// parent.node.addCell(cell.offset + parent.offset, cell.largestAccessedSize) - - -// val slice = _pointee.get -// var node = slice.node val graph = _pointee.get.node.graph.get val resolvedPointee = graph.find(graph.adjust(_pointee.get)) -// val link = graph.solver.find(node.term) -// node = link._1.asInstanceOf[Derm].node -// val offset = link._2 -// 
val cell = node.addCell(offset + slice.cell.offset, slice.cell.largestAccessedSize) _pointee = Some(graph.deadjust(resolvedPointee)) _pointee.get diff --git a/src/test/scala/LocalTest.scala b/src/test/scala/LocalTest.scala index 73f665a94..68c590024 100644 --- a/src/test/scala/LocalTest.scala +++ b/src/test/scala/LocalTest.scala @@ -1,6 +1,6 @@ import analysis.{AddressRange, DSC, DSG, DSN, DataLocation, Derm, HeapLocation} import ir.Endian.BigEndian -import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, ConvertToSingleProcedureReturn, DirectCall, Memory, MemoryAssign, MemoryLoad, SharedMemory} +import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, CFGPosition, ConvertToSingleProcedureReturn, DirectCall, Memory, MemoryAssign, MemoryLoad, Register, SharedMemory} import org.scalatest.funsuite.AnyFunSuite import test_util.TestUtil import ir.dsl.* @@ -183,38 +183,7 @@ class LocalTest extends AnyFunSuite, TestUtil { } -// -// -// -// ignore("local jumptable2_clang main") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/jumptable2/jumptable2_clang.adt", -// relfFile = "examples/jumptable2/jumptable2_clang.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.locals.get(program.mainProcedure) -//// assert(dsg.pointTo.size == 7) -//// assert(dsg.stackMapping.isEmpty) -//// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69680, 69684))._1.cells(0))._1.node.get.collapsed) -// } -// -// -// -// -// ignore("interproc unsafe pointer arithmetic") { -// // test interproc unification with points-to that have internal offsets into cells -// } -// -// + test("unsafe pointer arithmetic") { val results = RunUtils.loadAndTranslate( BASILConfig( @@ -249,611 +218,613 @@ class LocalTest extends AnyFunSuite, TestUtil { assert(stack48.node.get.allocationRegions.head.asInstanceOf[HeapLocation].size == 8) assert(dsg.adjust(stack48.getPointee).equals(stack40)) assert(dsg.adjust(stack48.getPointee).equals(stack56)) - println(dsg.find(dsg.stackMapping(24).cells(0)).getPointee) - println(dsg.find(dsg.stackMapping(40).cells(0)).getPointee) - assert(dsg.deadjust(stack24).cell.equals(dsg.deadjust(stack40).cell)) // these are pointees, they should point to the same cell at different offsets - assert(dsg.deadjust(stack40).internalOffset == 1) // todo check - assert(dsg.deadjust(stack24).internalOffset == 0) - assert(dsg.deadjust(stack24).offset == 0) + val unadjustedStack24Pointee = dsg.find(dsg.stackMapping(24).cells(0)).getPointee + val unadjustedStack40Pointee = dsg.find(dsg.stackMapping(40).cells(0)).getPointee + assert(unadjustedStack24Pointee.cell.equals(unadjustedStack40Pointee.cell)) + assert(unadjustedStack40Pointee.internalOffset == 1) + assert(unadjustedStack24Pointee.internalOffset == 0) + assert(unadjustedStack24Pointee.offset == 0) } -// -// test("interproc pointer arithmetic main") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", -// relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", 
-// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.locals.get(program.mainProcedure) -// val stack0 = dsg.stackMapping(0).cells(0) -// val stack8 = dsg.stackMapping(8).cells(0) -// val stack24 = dsg.stackMapping(24).cells(0) -// val stack32 = dsg.stackMapping(32).cells(0) -// val stack40 = dsg.stackMapping(40).cells(0) -// assert(dsg.pointTo.size == 9) -// assert(dsg.pointTo(stack0).equals(dsg.formals(R29))) -// assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) -// assert(dsg.pointTo(stack24)._1.node.get.equals(dsg.pointTo(stack32)._1.node.get)) -// assert(dsg.pointTo(stack24)._1.offset == 0) -// assert(dsg.pointTo(stack32)._1.offset == 16) -// assert(dsg.pointTo.contains(dsg.pointTo(stack40)._1)) -// assert(!dsg.pointTo(stack40)._1.node.get.equals(dsg.pointTo(stack24)._1.node.get)) -// } -// -// test("interproc pointer arithmetic callee") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", -// relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.locals.get(program.procs("callee")) -// val stack8 = dsg.stackMapping(8).cells(0) // R31 + 8 -// val stack24 = dsg.stackMapping(24).cells(0) // R31 + 24 -// assert(dsg.pointTo.size == 3) -// assert(dsg.getPointee(stack8).equals(dsg.formals(R0))) -// assert(dsg.getPointee(stack8)._1.offset == 0) -// assert(dsg.getPointee(stack24)._1.equals(dsg.formals(R0)._1.node.get.cells(16))) -// } -// -// -// test("internal merge") { -// val mem = SharedMemory("mem", 10000, 10000) -// val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) -// val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) -// var program = prog( -// proc("main", -// block("operations", -//// Assign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), -// locAssign1, -// locAssign2, -// MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), -// MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), -// ret -// ) -// ) -// ) -// -// val returnUnifier = ConvertToSingleProcedureReturn() -// program = returnUnifier.visitProgram(program) -// -// val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) -// val dsg: DSG = results.locals.get(program.mainProcedure) -// assert(dsg.formals(R1).equals(dsg.formals(R2))) -// assert(dsg.varToCell(locAssign1)(R6)._1.equals(dsg.varToCell(locAssign2)(R7)._1)) -// assert(dsg.varToCell(locAssign1)(R6)._2 == 0) -// assert(dsg.varToCell(locAssign2)(R7)._2 == 1) -// assert(dsg.pointTo.contains(dsg.varToCell(locAssign1)(R6)._1)) -// assert(dsg.pointTo(dsg.varToCell(locAssign1)(R6)._1)._1.equals(dsg.formals(R1)._1)) -// assert(dsg.pointTo.size == 1) -// -// } -// -// test("offsetting from middle of cell to a new cell") { -// val mem = SharedMemory("mem", 10000, 10000) -// val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) -// val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) -// val locAssign3 = Assign(R5, 
BinaryExpr(BVADD, R7, BitVecLiteral(8, 64)), Some("00005")) -// -// var program = prog( -// proc("main", -// block("operations", -// locAssign1, -// locAssign2, -// MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), -// MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), -// locAssign3, -// ret -// ) -// ) -// ) -// -// val returnUnifier = ConvertToSingleProcedureReturn() -// program = returnUnifier.visitProgram(program) -// -// val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) -// val dsg: DSG = results.locals.get(program.mainProcedure) -// assert(dsg.varToCell(locAssign3)(R5)._1.offset == 13) -// } -// -// test("offsetting from middle of cell to the same cell") { -// val mem = SharedMemory("mem", 10000, 10000) -// val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) -// val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) -// val locAssign3 = Assign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(7, 64)), Some("00005")) -// -// var program = prog( -// proc("main", -// block("operations", -// // Assign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), -// locAssign1, -// locAssign2, -// MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), -// MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), -// locAssign3, -// ret -// ) -// ) -// ) -// -// val returnUnifier = ConvertToSingleProcedureReturn() -// program = returnUnifier.visitProgram(program) -// -// val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) -// val dsg: DSG = results.locals.get(program.mainProcedure) -// assert(dsg.formals(R1).equals(dsg.formals(R2))) -// assert(dsg.varToCell(locAssign1)(R6)._1.equals(dsg.varToCell(locAssign2)(R7)._1)) -// assert(dsg.varToCell(locAssign1)(R6)._1.equals(dsg.varToCell(locAssign3)(R5)._1)) -// assert(dsg.varToCell(locAssign1)(R6)._2 == 0) -// assert(dsg.varToCell(locAssign2)(R7)._2 == 1) -// assert(dsg.varToCell(locAssign3)(R5)._2 == 8) -// assert(dsg.pointTo.contains(dsg.varToCell(locAssign1)(R6)._1)) -// assert(dsg.pointTo(dsg.varToCell(locAssign1)(R6)._1)._1.equals(dsg.formals(R1)._1)) -// assert(dsg.pointTo.size == 1) -// } -// -// test("internal offset transfer") { -// val mem = SharedMemory("mem", 10000, 10000) -// val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) -// val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) -// val locAssign3 = Assign(R5, R7, Some("00005")) -// -// var program = prog( -// proc("main", -// block("operations", -// // Assign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), -// locAssign1, -// locAssign2, -// MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), -// MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), -// locAssign3, -// ret -// ) -// ) -// ) -// -// val returnUnifier = ConvertToSingleProcedureReturn() -// program = returnUnifier.visitProgram(program) -// -// val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) -// val dsg: DSG = results.locals.get(program.mainProcedure) -// 
assert(dsg.varToCell(locAssign2)(R7).equals(dsg.varToCell(locAssign3)(R5))) -// } -// -// // bottom up tests -// test("bottom up jumptable2 sub_seven") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/jumptable2/jumptable2.adt", -// relfFile = "examples/jumptable2/jumptable2.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.bus.get(program.procs("sub_seven")) -// assert(dsg.pointTo.size == 9) -// assert(dsg.stackMapping.isEmpty) -// println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) -// -// // initial global mappings -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) -// -// -// } -// -// test("bottom up jumptable2 add_six") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/jumptable2/jumptable2.adt", -// relfFile = "examples/jumptable2/jumptable2.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.bus.get(program.procs("add_six")) -// assert(dsg.pointTo.size == 9) -// assert(dsg.stackMapping.isEmpty) -// println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) -// -// // initial global mappings -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) -// 
assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) -// -// } -// -// test("bottomup jumptable2 add_two") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/jumptable2/jumptable2.adt", -// relfFile = "examples/jumptable2/jumptable2.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.bus.get(program.procs("add_two")) -// assert(dsg.pointTo.size == 9) -// assert(dsg.stackMapping.isEmpty) -// println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) -// -// // initial global mappings -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) -// -// } -// -// test("bottom up jumptable2 main") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/jumptable2/jumptable2.adt", -// 
relfFile = "examples/jumptable2/jumptable2.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// -// -// val program = results.ir.program -// val dsg = results.analysis.get.bus.get(program.mainProcedure) -// assert(dsg.pointTo.size == 13) // 13 -// val framePointer = dsg.stackMapping(0).cells(0) -// val stack8 = dsg.stackMapping(8).cells(0) -// val stack16 = dsg.stackMapping(16).cells(0) -// val stack28 = dsg.stackMapping(28).cells(0) -// assert(dsg.pointTo(framePointer).equals(dsg.formals(R29))) -// assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) -// assert(dsg.pointTo(stack16).equals(dsg.formals(R1))) -// assert(dsg.pointTo(stack28).equals(dsg.formals(R0))) -// -// // initial global mappings -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) -// -// // bu -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))._1.node.get.collapsed) -// -// } -// -// -// -// test("bottom up interproc pointer arithmetic callee") { -// // same as interproc pointer arithmetic callee's local graph (no changes should have been made) -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", -// relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.bus.get(program.procs("callee")) -// val stack8 = dsg.stackMapping(8).cells(0) // R31 + 8 -// val stack24 = dsg.stackMapping(24).cells(0) // R31 + 24 -// assert(dsg.pointTo.size == 3) -// assert(dsg.getPointee(stack8).equals(dsg.formals(R0))) -// assert(dsg.getPointee(stack8)._1.offset == 0) -// assert(dsg.getPointee(stack24)._1.equals(dsg.formals(R0)._1.node.get.cells(16))) -// } -// -// -// test("bottom up interproc pointer arithmetic 
main") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", -// relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.bus.get(program.mainProcedure) -// val stack0 = dsg.stackMapping(0).cells(0) -// val stack8 = dsg.stackMapping(8).cells(0) -// val stack24 = dsg.stackMapping(24).cells(0) -// val stack32 = dsg.stackMapping(32).cells(0) -// val stack40 = dsg.stackMapping(40).cells(0) -// assert(dsg.pointTo.size == 9) -// assert(dsg.pointTo(stack0).equals(dsg.formals(R29))) -// assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) -// assert(dsg.pointTo(stack24)._1.node.get.equals(dsg.pointTo(stack32)._1.node.get)) -// assert(dsg.pointTo(stack24)._1.offset == 0) -// assert(dsg.pointTo(stack32)._1.offset == 16) -// assert(dsg.pointTo.contains(dsg.pointTo(stack40)._1)) -// assert(dsg.pointTo(stack40)._1.node.get.equals(dsg.pointTo(stack24)._1.node.get)) -// assert(dsg.pointTo(stack40)._1.offset == 32) -// assert(dsg.pointTo(stack40)._2 == 0) -// assert(dsg.pointTo(stack32)._2 == 0) -// assert(dsg.pointTo(stack24)._2 == 0) -// } -// -// -// // top down tests -// test("top down jumptable2 main") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/jumptable2/jumptable2.adt", -// relfFile = "examples/jumptable2/jumptable2.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// -// val program = results.ir.program -// val dsg = results.analysis.get.tds.get(program.mainProcedure) + + test("interproc pointer arithmetic main") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", + relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.locals.get(program.mainProcedure) + val stack0 = dsg.adjust(dsg.find(dsg.stackMapping(0).cells(0)).getPointee) + val stack8 = dsg.adjust(dsg.find(dsg.stackMapping(8).cells(0)).getPointee) + val stack24 = dsg.adjust(dsg.find(dsg.stackMapping(24).cells(0)).getPointee) + val stack32 = dsg.adjust(dsg.find(dsg.stackMapping(32).cells(0)).getPointee) + val stack40 = dsg.adjust(dsg.find(dsg.stackMapping(40).cells(0)).getPointee) + + assert(stack0.equals(dsg.adjust(dsg.formals(R29)))) + assert(stack8.equals(dsg.adjust(dsg.formals(R30)))) + assert(stack24.node.get.equals(stack32.node.get)) + assert(stack24.offset == 0) + assert(stack32.offset == 16) + assert(stack40._pointee.isDefined) + assert(!stack40.node.get.equals(stack24.node.get)) + } + + test("interproc pointer arithmetic callee") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = 
"examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", + relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.locals.get(program.procs("callee")) + val stack8 = dsg.adjust(dsg.find(dsg.stackMapping(8).cells(0)).getPointee) + val stack24 = dsg.adjust(dsg.find(dsg.stackMapping(24).cells(0)).getPointee) + + assert(stack8.equals(dsg.adjust(dsg.formals(R0)))) + assert(stack8.offset == 0) + assert(stack24.equals(dsg.adjust(dsg.formals(R0)).node.get.cells(16))) + } + + + test("internal merge") { + val mem = SharedMemory("mem", 10000, 10000) + val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) + val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) + var program = prog( + proc("main", + block("operations", +// Assign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), + locAssign1, + locAssign2, + MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), + MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), + ret + ) + ) + ) + + val returnUnifier = ConvertToSingleProcedureReturn() + program = returnUnifier.visitProgram(program) + + val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) + val dsg: DSG = results.locals.get(program.mainProcedure) + assert(dsg.adjust(dsg.formals(R1)).equals(dsg.adjust(dsg.formals(R2)))) + assert(dsg.find(dsg.varToCell(locAssign1)(R6)).cell.equals(dsg.find(dsg.varToCell(locAssign2)(R7)).cell)) + assert(dsg.find(dsg.varToCell(locAssign1)(R6)).internalOffset == 0) + assert(dsg.find(dsg.varToCell(locAssign2)(R7)).internalOffset == 1) + assert(dsg.adjust(dsg.varToCell(locAssign1)(R6))._pointee.isDefined) + assert(dsg.adjust(dsg.adjust(dsg.varToCell(locAssign1)(R6)).getPointee).equals(dsg.adjust(dsg.formals(R1)))) + + } + + test("offsetting from middle of cell to a new cell") { + val mem = SharedMemory("mem", 10000, 10000) + val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) + val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) + val locAssign3 = Assign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(8, 64)), Some("00005")) + + var program = prog( + proc("main", + block("operations", + locAssign1, + locAssign2, + MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), + MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), + locAssign3, + ret + ) + ) + ) + + val returnUnifier = ConvertToSingleProcedureReturn() + program = returnUnifier.visitProgram(program) + + val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) + val dsg: DSG = results.locals.get(program.mainProcedure) + assert(dsg.find(dsg.varToCell(locAssign3)(R5)).offset == 13) + } + + test("offsetting from middle of cell to the same cell") { + val mem = SharedMemory("mem", 10000, 10000) + val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) + val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) + val locAssign3 
= Assign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(7, 64)), Some("00005")) + + var program = prog( + proc("main", + block("operations", + // Assign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), + locAssign1, + locAssign2, + MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), + MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), + locAssign3, + ret + ) + ) + ) + + val returnUnifier = ConvertToSingleProcedureReturn() + program = returnUnifier.visitProgram(program) + + val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) + val dsg: DSG = results.locals.get(program.mainProcedure) + assert(dsg.find(dsg.formals(R1)).equals(dsg.find(dsg.formals(R2)))) + assert(dsg.find(dsg.varToCell(locAssign1)(R6)).cell.equals(dsg.find(dsg.varToCell(locAssign2)(R7)).cell)) + assert(dsg.find(dsg.varToCell(locAssign1)(R6)).cell.equals(dsg.find(dsg.varToCell(locAssign3)(R5)).cell)) + assert(dsg.find(dsg.varToCell(locAssign1)(R6)).internalOffset == 0) + assert(dsg.find(dsg.varToCell(locAssign2)(R7)).internalOffset == 1) + assert(dsg.find(dsg.varToCell(locAssign3)(R5)).internalOffset == 8) + assert(dsg.find(dsg.varToCell(locAssign1)(R6)).cell._pointee.isDefined) + assert(dsg.find(dsg.find(dsg.varToCell(locAssign1)(R6)).cell.getPointee).equals(dsg.find(dsg.formals(R1)))) + } + + test("internal offset transfer") { + val mem = SharedMemory("mem", 10000, 10000) + val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) + val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) + val locAssign3 = Assign(R5, R7, Some("00005")) + + var program = prog( + proc("main", + block("operations", + // Assign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), + locAssign1, + locAssign2, + MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), + MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), + locAssign3, + ret + ) + ) + ) + + val returnUnifier = ConvertToSingleProcedureReturn() + program = returnUnifier.visitProgram(program) + + val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) + val dsg: DSG = results.locals.get(program.mainProcedure) + assert(dsg.find(dsg.varToCell(locAssign2)(R7)).equals(dsg.find(dsg.varToCell(locAssign3)(R5)))) + } + + // bottom up tests + test("bottom up jumptable2 sub_seven") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.bus.get(program.procs("sub_seven")) + assert(dsg.stackMapping.isEmpty) + assert(dsg.find(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.collapsed) + + + // initial global mappings + assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 
24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) + + } + + test("bottom up jumptable2 add_six") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.bus.get(program.procs("add_six")) + assert(dsg.stackMapping.isEmpty) + assert(dsg.find(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.collapsed) + + // initial global mappings + assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 
4))._1.cells(0)))) + + } + + test("bottomup jumptable2 add_two") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.bus.get(program.procs("add_two")) + assert(dsg.stackMapping.isEmpty) + assert(dsg.find(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.collapsed) + + // initial global mappings + assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) + + } + + test("bottom up jumptable2 main") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + + + val program = results.ir.program + val dsg = results.analysis.get.bus.get(program.mainProcedure) + + val framePointer = dsg.find(dsg.stackMapping(0).cells(0)) + val stack8 = dsg.find(dsg.stackMapping(8).cells(0)) + val stack16 = dsg.find(dsg.stackMapping(16).cells(0)) + val stack28 = dsg.find(dsg.stackMapping(28).cells(0)) + assert(dsg.adjust(framePointer.getPointee).equals(dsg.adjust(dsg.formals(R29)))) + assert(dsg.adjust(stack8.getPointee).equals(dsg.adjust(dsg.formals(R30)))) + assert(dsg.adjust(stack16.getPointee).equals(dsg.adjust(dsg.formals(R1)))) + assert(dsg.adjust(stack28.getPointee).equals(dsg.adjust(dsg.formals(R0)))) + + + // initial global mappings + assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) + 
assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) + + // bu + assert(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)).getPointee.node.collapsed) + + } + + + + test("bottom up interproc pointer arithmetic callee") { + // same as interproc pointer arithmetic callee's local graph (no changes should have been made) + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", + relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.bus.get(program.procs("callee")) + val stack8 = dsg.adjust(dsg.find(dsg.stackMapping(8).cells(0)).getPointee) + val stack24 = dsg.adjust(dsg.find(dsg.stackMapping(24).cells(0)).getPointee) + + assert(stack8.equals(dsg.adjust(dsg.formals(R0)))) + assert(stack8.offset == 0) + assert(stack24.equals(dsg.adjust(dsg.formals(R0)).node.get.cells(16))) + + } + + + test("bottom up interproc pointer arithmetic main") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", + relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.bus.get(program.mainProcedure) + + val stack0 = dsg.adjust(dsg.find(dsg.stackMapping(0).cells(0)).getPointee) + val stack8 = dsg.adjust(dsg.find(dsg.stackMapping(8).cells(0)).getPointee) + val stack24 = dsg.adjust(dsg.find(dsg.stackMapping(24).cells(0)).getPointee) + val stack32 = dsg.adjust(dsg.find(dsg.stackMapping(32).cells(0)).getPointee) + val stack40 = dsg.adjust(dsg.find(dsg.stackMapping(40).cells(0)).getPointee) + + 
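+    // stack+0 and stack+8 hold the saved R29 and R30 (the AArch64 frame pointer and link register);
+    // stack+24, stack+32 and stack+40 all point into a single node at offsets 0, 16 and 32,
+    // each with a zero internal offset, as the assertions below check cell by cell.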
assert(stack0.equals(dsg.adjust(dsg.formals(R29)))) + assert(stack8.equals(dsg.adjust(dsg.formals(R30)))) + assert(stack24.node.get.equals(stack32.node.get)) + assert(stack24.offset == 0) + assert(stack32.offset == 16) + assert(stack40._pointee.isDefined) + assert(stack40.node.get.equals(stack24.node.get)) + assert(stack40.offset == 32) + assert(dsg.find(dsg.stackMapping(40).cells(0)).getPointee.internalOffset == 0) + assert(dsg.find(dsg.stackMapping(32).cells(0)).getPointee.internalOffset == 0) + assert(dsg.find(dsg.stackMapping(24).cells(0)).getPointee.internalOffset == 0) + + } + + + // top down tests + test("top down jumptable2 main") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + + val program = results.ir.program + val dsg = results.analysis.get.tds.get(program.mainProcedure) // assert(dsg.pointTo.size == 13) // 13 -// val framePointer = dsg.stackMapping(0).cells(0) -// val stack8 = dsg.stackMapping(8).cells(0) -// val stack16 = dsg.stackMapping(16).cells(0) -// val stack28 = dsg.stackMapping(28).cells(0) -// assert(dsg.pointTo(framePointer).equals(dsg.formals(R29))) -// assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) -// assert(dsg.pointTo(stack16).equals(dsg.formals(R1))) -// assert(dsg.pointTo(stack28).equals(dsg.formals(R0))) -// -// // initial global mappings -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) -// -// // bu -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))._1.node.get.collapsed) -// } -// -// test("top down jumptable2 sub_seven") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/jumptable2/jumptable2.adt", -// relfFile = "examples/jumptable2/jumptable2.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = 
BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.tds.get(program.procs("sub_seven")) -// assert(dsg.pointTo.size == 9) -// assert(dsg.stackMapping.isEmpty) -// println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) -// -// // initial global mappings -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) -// -// -// } -// -// test("top down jumptable2 add_six") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/jumptable2/jumptable2.adt", -// relfFile = "examples/jumptable2/jumptable2.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.tds.get(program.procs("add_six")) -// assert(dsg.pointTo.size == 9) -// assert(dsg.stackMapping.isEmpty) -// println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) -// -// // initial global mappings -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// 
assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) -// -// } -// -// test("top down jumptable2 add_two") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/jumptable2/jumptable2.adt", -// relfFile = "examples/jumptable2/jumptable2.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.tds.get(program.procs("add_two")) -// assert(dsg.pointTo.size == 9) -// assert(dsg.stackMapping.isEmpty) -// println(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0))._1.node.get.collapsed) -// -// // initial global mappings -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8))._1.equals(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16))._1.equals(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))) -// assert(dsg.pointTo(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0))._1.equals(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))) -// -// } -// -// test("top down interproc pointer arithmetic callee") { -// // same as interproc pointer arithmetic callee's local graph (no changes should have been made) -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", -// relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program 
-// val dsg = results.analysis.get.tds.get(program.procs("callee")) -// val stack8 = dsg.stackMapping(8).cells(0) // R31 + 8 -// val stack24 = dsg.stackMapping(24).cells(0) // R31 + 24 -// assert(dsg.pointTo.size == 6) -// assert(dsg.getPointee(stack8).equals(dsg.formals(R0))) -// assert(dsg.getPointee(stack8)._1.offset == 16) -// assert(dsg.getPointee(stack24)._1.equals(dsg.formals(R0)._1.node.get.cells(32))) -// } -// -// -// // top down phase should be the same as bu phase -// test("top down interproc pointer arithmetic main") { -// val results = RunUtils.loadAndTranslate( -// BASILConfig( -// loading = ILLoadingConfig( -// inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", -// relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", -// specFile = None, -// dumpIL = None, -// ), -// staticAnalysis = Some(StaticAnalysisConfig()), -// boogieTranslation = BoogieGeneratorConfig(), -// outputPrefix = "boogie_out", -// ) -// ) -// val program = results.ir.program -// val dsg = results.analysis.get.tds.get(program.mainProcedure) -// val stack0 = dsg.stackMapping(0).cells(0) -// val stack8 = dsg.stackMapping(8).cells(0) -// val stack24 = dsg.stackMapping(24).cells(0) -// val stack32 = dsg.stackMapping(32).cells(0) -// val stack40 = dsg.stackMapping(40).cells(0) -// assert(dsg.pointTo.size == 9) -// assert(dsg.pointTo(stack0).equals(dsg.formals(R29))) -// assert(dsg.pointTo(stack8).equals(dsg.formals(R30))) -// assert(dsg.pointTo(stack24)._1.node.get.equals(dsg.pointTo(stack32)._1.node.get)) -// assert(dsg.pointTo(stack24)._1.offset == 0) -// assert(dsg.pointTo(stack32)._1.offset == 16) -// assert(dsg.pointTo.contains(dsg.pointTo(stack40)._1)) -// assert(dsg.pointTo(stack40)._1.node.get.equals(dsg.pointTo(stack24)._1.node.get)) -// assert(dsg.pointTo(stack40)._1.offset == 32) -// assert(dsg.pointTo(stack40)._2 == 0) -// assert(dsg.pointTo(stack32)._2 == 0) -// assert(dsg.pointTo(stack24)._2 == 0) -// } -// + + val framePointer = dsg.find(dsg.stackMapping(0).cells(0)) + val stack8 = dsg.find(dsg.stackMapping(8).cells(0)) + val stack16 = dsg.find(dsg.stackMapping(16).cells(0)) + val stack28 = dsg.find(dsg.stackMapping(28).cells(0)) + assert(dsg.adjust(framePointer.getPointee).equals(dsg.adjust(dsg.formals(R29)))) + assert(dsg.adjust(stack8.getPointee).equals(dsg.adjust(dsg.formals(R30)))) + assert(dsg.adjust(stack16.getPointee).equals(dsg.adjust(dsg.formals(R1)))) + assert(dsg.adjust(stack28.getPointee).equals(dsg.adjust(dsg.formals(R0)))) + + + // initial global mappings + assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 
24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) + + // bu + assert(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)).getPointee.node.collapsed) + + } + + test("top down jumptable2 sub_seven") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.tds.get(program.procs("sub_seven")) + assert(dsg.stackMapping.isEmpty) + assert(dsg.find(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.collapsed) + + + // initial global mappings + assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) + + + } + + test("top down jumptable2 add_six") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.tds.get(program.procs("add_six")) + assert(dsg.stackMapping.isEmpty) + 
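+    // The expectations below mirror the bottom-up add_six graph: the cell for the global at
+    // 69648 collapses, and the initial global pointer layout is left unchanged.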
assert(dsg.find(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.collapsed) + + + // initial global mappings + assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) + + + } + + test("top down jumptable2 add_two") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/jumptable2/jumptable2.adt", + relfFile = "examples/jumptable2/jumptable2.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.tds.get(program.procs("add_two")) + assert(dsg.stackMapping.isEmpty) + assert(dsg.find(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.collapsed) + + + // initial global mappings + assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 
69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) + + + } + + test("top down interproc pointer arithmetic callee") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", + relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.tds.get(program.procs("callee")) + + val stack8 = dsg.adjust(dsg.find(dsg.stackMapping(8).cells(0)).getPointee) + val stack24 = dsg.adjust(dsg.find(dsg.stackMapping(24).cells(0)).getPointee) + + assert(stack8.equals(dsg.adjust(dsg.formals(R0)))) + assert(stack8.offset == 16) + assert(stack24.equals(dsg.adjust(dsg.formals(R0)).node.get.cells(32))) + + } + + + // top down phase should be the same as bu phase + test("top down interproc pointer arithmetic main") { + val results = RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", + relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + val program = results.ir.program + val dsg = results.analysis.get.tds.get(program.mainProcedure) + + + val stack0 = dsg.adjust(dsg.find(dsg.stackMapping(0).cells(0)).getPointee) + val stack8 = dsg.adjust(dsg.find(dsg.stackMapping(8).cells(0)).getPointee) + val stack24 = dsg.adjust(dsg.find(dsg.stackMapping(24).cells(0)).getPointee) + val stack32 = dsg.adjust(dsg.find(dsg.stackMapping(32).cells(0)).getPointee) + val stack40 = dsg.adjust(dsg.find(dsg.stackMapping(40).cells(0)).getPointee) + + assert(stack0.equals(dsg.adjust(dsg.formals(R29)))) + assert(stack8.equals(dsg.adjust(dsg.formals(R30)))) + assert(stack24.node.get.equals(stack32.node.get)) + assert(stack24.offset == 0) + assert(stack32.offset == 16) + assert(stack40._pointee.isDefined) + assert(stack40.node.get.equals(stack24.node.get)) + assert(stack40.offset == 32) + assert(dsg.find(dsg.stackMapping(40).cells(0)).getPointee.internalOffset == 0) + assert(dsg.find(dsg.stackMapping(32).cells(0)).getPointee.internalOffset == 0) + assert(dsg.find(dsg.stackMapping(24).cells(0)).getPointee.internalOffset == 0) + + } + } From 5a8d5e3eefbef2382ee96672d04ae381588869eb Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Tue, 3 Sep 2024 10:24:20 +1000 Subject: [PATCH 042/104] Handling negatives and using BigInts instead --- .../InterprocSteensgaardAnalysis.scala | 14 ++++----- src/main/scala/analysis/MemoryModelMap.scala | 29 ++++++++++--------- .../scala/analysis/MemoryRegionAnalysis.scala | 28 ++++++++---------- src/main/scala/analysis/RegionInjector.scala | 27 ++++++++--------- src/main/scala/analysis/UtilMethods.scala | 8 +++++ 
src/test/scala/PointsToTest.scala | 18 ++++++------ 6 files changed, 63 insertions(+), 61 deletions(-) diff --git a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala index 38153e277..271ff6b86 100644 --- a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala +++ b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala @@ -74,13 +74,13 @@ class InterprocSteensgaardAnalysis( * @param address * @return BitVecLiteral: the relocated address */ - def relocatedBase(address: BitVecLiteral): BitVecLiteral = { - val tableAddress = globalOffsets.getOrElse(address.value, address.value) + def relocatedBase(address: BigInt): BitVecLiteral = { + val tableAddress = globalOffsets.getOrElse(address, address) // this condition checks if the address is not layered and returns if it is not - if (tableAddress != address.value && !globalOffsets.contains(tableAddress)) { - return address + if (tableAddress != address && !globalOffsets.contains(tableAddress)) { + return BitVecLiteral(address, 64) } - BitVecLiteral(tableAddress, address.size) + BitVecLiteral(tableAddress, 64) } /** @@ -153,7 +153,7 @@ class InterprocSteensgaardAnalysis( } { r match { case stackRegion: StackRegion => - val nextOffset = BinaryExpr(binExpr.op, stackRegion.start, b) + val nextOffset = BinaryExpr(binExpr.op, BitVecLiteral(stackRegion.start, 64), b) evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, b2), n) } @@ -265,7 +265,7 @@ class InterprocSteensgaardAnalysis( case directCall: DirectCall => // X = alloc P: [[X]] = ↑[[alloc-i]] if (directCall.target.name == "malloc") { - val alloc = HeapRegion(nextMallocCount(), BitVecLiteral(BigInt(0), 0), IRWalk.procedure(cmd)) + val alloc = HeapRegion(nextMallocCount(), 0, IRWalk.procedure(cmd)) unify(IdentifierVariable(RegisterVariableWrapper(mallocVariable, getUse(mallocVariable, cmd, reachingDefs))), PointerRef(AllocVariable(alloc))) } diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index 19a372405..620fff93c 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -71,11 +71,11 @@ class MemoryModelMap { case h: HeapRegion => val currentHeapMap = heapMap if (currentHeapMap.isEmpty) { - currentHeapMap(RangeKey(offset, offset + h.size.value - 1)) = h + currentHeapMap(RangeKey(offset, offset + h.size - 1)) = h } else { val currentMaxRange = currentHeapMap.keys.maxBy(_.end) val currentMaxRegion = currentHeapMap(currentMaxRange) - currentHeapMap(RangeKey(currentMaxRange.start + 1, h.size.value - 1)) = h + currentHeapMap(RangeKey(currentMaxRange.start + 1, h.size - 1)) = h } } } @@ -154,7 +154,7 @@ class MemoryModelMap { val reversedExternalFunctionRgns = (externalFunctions ++ globalAddresses).map((offset, name) => resolveInverseGlobalOffset(name, offset, globalOffsets) -> name) val filteredGlobalOffsets = globalAddresses.filterNot((offset, name) => reversedExternalFunctionRgns.contains(offset)) - val externalFunctionRgns = (reversedExternalFunctionRgns ++ filteredGlobalOffsets).map((offset, name) => DataRegion(name, BitVecLiteral(offset, 64))) + val externalFunctionRgns = (reversedExternalFunctionRgns ++ filteredGlobalOffsets).map((offset, name) => DataRegion(name, offset)) // we should collect all data regions otherwise the ordering might be wrong var dataRgns: Set[DataRegion] = Set.empty @@ -165,18 +165,18 
@@ class MemoryModelMap { exitNodes.foreach(exitNode => if (procedureToSharedRegions.contains(exitNode)) { val sharedRegions = procedureToSharedRegions(exitNode) - sharedStacks(exitNode.name) = sharedRegions.collect { case r: StackRegion => r }.toList.sortBy(_.start.value) + sharedStacks(exitNode.name) = sharedRegions.collect { case r: StackRegion => r }.toList.sortBy(_.start) } // for each function exit node we get the memory region and add it to the mapping - val stackRgns = regionsPerProcedure(exitNode).collect { case r: StackRegion => r }.toList.sortBy(_.start.value) + val stackRgns = regionsPerProcedure(exitNode).collect { case r: StackRegion => r }.toList.sortBy(_.start) dataRgns = dataRgns ++ regionsPerProcedure(exitNode).collect { case r: DataRegion => r } localStacks(exitNode.name) = stackRgns ) // add externalFunctionRgn to dataRgns and sort by value - val allDataRgns = (dataRgns ++ externalFunctionRgns).toList.sortBy(_.start.value) + val allDataRgns = (dataRgns ++ externalFunctionRgns).toList.sortBy(_.start) for (dataRgn <- allDataRgns) { - add(dataRgn.start.value, dataRgn) + add(dataRgn.start, dataRgn) } // add heap regions @@ -200,7 +200,7 @@ class MemoryModelMap { contextStack.push(localStacks(funName)) stackMap.clear() for (stackRgn <- contextStack.top) { - add(stackRgn.start.value, stackRgn) + add(stackRgn.start, stackRgn) } if (!sharedStacks.contains(funName)) { @@ -209,7 +209,7 @@ class MemoryModelMap { sharedContextStack.push(sharedStacks(funName)) sharedStackMap.clear() for (stackRgn <- sharedContextStack.top) { - add(stackRgn.start.value, stackRgn, true) + add(stackRgn.start, stackRgn, true) } } @@ -218,7 +218,7 @@ class MemoryModelMap { contextStack.pop() stackMap.clear() for (stackRgn <- contextStack.top) { - add(stackRgn.start.value, stackRgn) + add(stackRgn.start, stackRgn) } } @@ -226,7 +226,7 @@ class MemoryModelMap { sharedContextStack.pop() sharedStackMap.clear() for (stackRgn <- sharedContextStack.top) { - add(stackRgn.start.value, stackRgn, true) + add(stackRgn.start, stackRgn, true) } } } @@ -435,17 +435,18 @@ class MemoryModelMap { trait MemoryRegion { val regionIdentifier: String + val subRegions: mutable.Set[MemoryRegion] = mutable.Set() } -case class StackRegion(override val regionIdentifier: String, start: BitVecLiteral, parent: Procedure) extends MemoryRegion { +case class StackRegion(override val regionIdentifier: String, start: BigInt, parent: Procedure) extends MemoryRegion { override def toString: String = s"Stack($regionIdentifier, $start, ${parent.name})" } -case class HeapRegion(override val regionIdentifier: String, size: BitVecLiteral, parent: Procedure) extends MemoryRegion { +case class HeapRegion(override val regionIdentifier: String, size: BigInt, parent: Procedure) extends MemoryRegion { override def toString: String = s"Heap($regionIdentifier, $size)" } -case class DataRegion(override val regionIdentifier: String, start: BitVecLiteral) extends MemoryRegion { +case class DataRegion(override val regionIdentifier: String, start: BigInt) extends MemoryRegion { override def toString: String = s"Data($regionIdentifier, $start)" } diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index 8decde0e4..9e064648b 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -20,7 +20,7 @@ trait MemoryRegionAnalysis(val program: Program, var mallocCount: Int = 0 private var stackCount: Int = 0 - val stackMap: 
mutable.Map[Procedure, mutable.Map[Expr, StackRegion]] = mutable.Map() + val stackMap: mutable.Map[Procedure, mutable.Map[BigInt, StackRegion]] = mutable.Map() private def nextMallocCount() = { mallocCount += 1 @@ -40,7 +40,7 @@ trait MemoryRegionAnalysis(val program: Program, * @param parent : the function entry node * @return the stack region corresponding to the offset */ - private def poolMaster(expr: BitVecLiteral, stackBase: Procedure): StackRegion = { + private def poolMaster(expr: BigInt, stackBase: Procedure): StackRegion = { val stackPool = stackMap.getOrElseUpdate(stackBase, mutable.HashMap()) if (stackPool.contains(expr)) { stackPool(expr) @@ -110,12 +110,8 @@ trait MemoryRegionAnalysis(val program: Program, case Some(b: BitVecLiteral) => regions.foreach { case stackRegion: StackRegion => - val nextOffset = BinaryExpr(binExpr.op, stackRegion.start, b) - evaluateExpression(nextOffset, constantProp(n)) match { - case Some(b2: BitVecLiteral) => - reducedRegions = reducedRegions + poolMaster(b2, IRWalk.procedure(n)) - case None => - } + val nextOffset = bitVectorOpToBigIntOp(binExpr.op, stackRegion.start, b.value) + reducedRegions = reducedRegions + poolMaster(nextOffset, IRWalk.procedure(n)) case _ => } case None => @@ -150,11 +146,8 @@ trait MemoryRegionAnalysis(val program: Program, if (spList.contains(binOp.arg1)) { evaluateExpression(binOp.arg2, constantProp(n)) match { case Some(b: BitVecLiteral) => - if (isNegative(b)) { - Set(poolMaster(BitVecLiteral(0, 64), IRWalk.procedure(n))) - } else { - Set(poolMaster(b, IRWalk.procedure(n))) - } + val negB = if isNegative(b) then -b.value else b.value + Set(poolMaster(negB, IRWalk.procedure(n))) case None => env } } else if (reducibleToRegion(binOp, n).nonEmpty) { @@ -167,7 +160,7 @@ trait MemoryRegionAnalysis(val program: Program, } case variable: Variable => variable match { - case reg: Register if spList.contains(reg) => + case reg: Register if spList.contains(reg) => // TODO: this is a hack eval(BitVecLiteral(0, 64), env, n) case _ => evaluateExpression(variable, constantProp(n)) match { @@ -181,7 +174,8 @@ trait MemoryRegionAnalysis(val program: Program, eval(memoryLoad.index, env, n) // ignore case where it could be a global region (loaded later in MMM from relf) case b: BitVecLiteral => - Set(poolMaster(b, IRWalk.procedure(n))) + val negB = if isNegative(b) then -b.value else b.value + Set(poolMaster(negB, IRWalk.procedure(n))) // we cannot evaluate this to a concrete value, we need VSA for this case _ => Logger.debug(s"type: ${exp.getClass} $exp\n") @@ -214,7 +208,9 @@ trait MemoryRegionAnalysis(val program: Program, // } if (directCall.target.name == "malloc") { evaluateExpression(mallocVariable, constantProp(n)) match { - case Some(b: BitVecLiteral) => regionLattice.lub(s, Set(HeapRegion(nextMallocCount(), b, IRWalk.procedure(n)))) + case Some(b: BitVecLiteral) => + val negB = if isNegative(b) then -b.value else b.value + regionLattice.lub(s, Set(HeapRegion(nextMallocCount(), negB, IRWalk.procedure(n)))) case None => s } } else { diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala index 424898093..c1beca748 100644 --- a/src/main/scala/analysis/RegionInjector.scala +++ b/src/main/scala/analysis/RegionInjector.scala @@ -34,13 +34,13 @@ class RegionInjector(domain: mutable.Set[CFGPosition], * @param globalOffsets * @return BitVecLiteral: the relocated address */ - def relocatedBase(address: BitVecLiteral, globalOffsets: Map[BigInt, BigInt]): BitVecLiteral = { - val 
tableAddress = globalOffsets.getOrElse(address.value, address.value) + def relocatedBase(address: BigInt, globalOffsets: Map[BigInt, BigInt]): BitVecLiteral = { + val tableAddress = globalOffsets.getOrElse(address, address) // this condition checks if the address is not layered and returns if it is not - if (tableAddress != address.value && !globalOffsets.contains(tableAddress)) { - return address + if (tableAddress != address && !globalOffsets.contains(tableAddress)) { + return BitVecLiteral(address, 64) } - BitVecLiteral(tableAddress, address.size) + BitVecLiteral(tableAddress, 64) } /** @@ -116,17 +116,14 @@ class RegionInjector(domain: mutable.Set[CFGPosition], case stackRegion: StackRegion => println(s"StackRegion: ${stackRegion.start}") println(s"BitVecLiteral: ${b}") - if (b.size == stackRegion.start.size) { - val nextOffset = BinaryExpr(binExpr.op, stackRegion.start, b) - evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => - reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, b2), n) - } - } + //if (b.size == stackRegion.start.size) { TODO: Double check why this is needed + val nextOffset = bitVectorOpToBigIntOp(binExpr.op, stackRegion.start, b.value) + reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, BitVecLiteral(nextOffset, 64)), n) + //} case dataRegion: DataRegion => - val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start, globalOffsets), b) - evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => - reducedRegions ++= exprToRegion(b2, n) - } + //val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start, globalOffsets), b) + val nextOffset = bitVectorOpToBigIntOp(binExpr.op, dataRegion.start, b.value) + reducedRegions ++= exprToRegion(BitVecLiteral(nextOffset, 64), n) case _ => } } diff --git a/src/main/scala/analysis/UtilMethods.scala b/src/main/scala/analysis/UtilMethods.scala index 2d6c54090..a91595cd5 100644 --- a/src/main/scala/analysis/UtilMethods.scala +++ b/src/main/scala/analysis/UtilMethods.scala @@ -170,4 +170,12 @@ def unwrapExpr(expr: Expr): Set[Expr] = { case _ => } buffers +} + +def bitVectorOpToBigIntOp(op: BinOp, lhs: BigInt, rhs: BigInt): BigInt = { + op match { + case BVADD => lhs + rhs + case BVSUB => lhs - rhs + case _ => throw RuntimeException("Binary operation support not implemented: " + op) + } } \ No newline at end of file diff --git a/src/test/scala/PointsToTest.scala b/src/test/scala/PointsToTest.scala index 32131ed46..72c21e22b 100644 --- a/src/test/scala/PointsToTest.scala +++ b/src/test/scala/PointsToTest.scala @@ -76,7 +76,7 @@ class PointsToTest extends AnyFunSuite with OneInstancePerTest with BeforeAndAft val results = runAnalyses(program) results.mmmResults.pushContext("main") assert(results.mmmResults.findStackObject(BigInt(4)).isDefined) - assert(results.mmmResults.findStackObject(BigInt(4)).get.start == bv64(4)) + assert(results.mmmResults.findStackObject(BigInt(4)).get.start == BigInt(4)) assert(results.mmmResults.findStackObject(BigInt(4)).get.regionIdentifier == "stack_1") } @@ -110,10 +110,10 @@ class PointsToTest extends AnyFunSuite with OneInstancePerTest with BeforeAndAft assert(results.mmmResults.findStackObject(BigInt(10)).isDefined) - assert(results.mmmResults.findStackObject(BigInt(4)).get.start == bv64(4)) - assert(results.mmmResults.findStackObject(BigInt(5)).get.start == bv64(4)) - assert(results.mmmResults.findStackObject(BigInt(6)).get.start == bv64(6)) - 
assert(results.mmmResults.findStackObject(BigInt(10)).get.start == bv64(6)) + assert(results.mmmResults.findStackObject(BigInt(4)).get.start == BigInt(4)) + assert(results.mmmResults.findStackObject(BigInt(5)).get.start == BigInt(4)) + assert(results.mmmResults.findStackObject(BigInt(6)).get.start == BigInt(6)) + assert(results.mmmResults.findStackObject(BigInt(10)).get.start == BigInt(6)) } // /** @@ -193,7 +193,7 @@ class PointsToTest extends AnyFunSuite with OneInstancePerTest with BeforeAndAft results.mmmResults.pushContext("main") assert(results.mmmResults.findStackObject(BigInt(6)).isDefined) - assert(results.mmmResults.findStackObject(BigInt(6)).get.start == bv64(6)) + assert(results.mmmResults.findStackObject(BigInt(6)).get.start == BigInt(6)) /* ------------------------------------------------------------------------- */ @@ -201,8 +201,8 @@ class PointsToTest extends AnyFunSuite with OneInstancePerTest with BeforeAndAft assert(results.mmmResults.findSharedStackObject(BigInt(6)).nonEmpty) assert(results.mmmResults.findSharedStackObject(BigInt(10)).nonEmpty) - assert(results.mmmResults.findSharedStackObject(BigInt(6)).head.start == bv64(6)) - assert(results.mmmResults.findSharedStackObject(BigInt(10)).head.start == bv64(10)) + assert(results.mmmResults.findSharedStackObject(BigInt(6)).head.start == BigInt(6)) + assert(results.mmmResults.findSharedStackObject(BigInt(10)).head.start == BigInt(10)) } /** @@ -252,7 +252,7 @@ class PointsToTest extends AnyFunSuite with OneInstancePerTest with BeforeAndAft results.mmmResults.pushContext("main") assert(results.mmmResults.findStackObject(BigInt(6)).isDefined) - assert(results.mmmResults.findStackObject(BigInt(6)).get.start == bv64(6)) + assert(results.mmmResults.findStackObject(BigInt(6)).get.start == BigInt(6)) /* ------------------------------------------------------------------------- */ From 6ecbf095de9ca2b9e61e816ab589e60e1ea58aba Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Wed, 4 Sep 2024 14:51:31 +1000 Subject: [PATCH 043/104] visualiser --- src/main/scala/analysis/DSAUtility.scala | 121 ++++++++++++++++++- src/main/scala/analysis/Local.scala | 16 +-- src/main/scala/cfg_visualiser/DotTools.scala | 65 ++++++++++ src/test/scala/LocalTest.scala | 2 +- 4 files changed, 191 insertions(+), 13 deletions(-) diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index 55bdda63d..550c20631 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -1,6 +1,7 @@ package analysis import analysis.solvers.{DSAUnionFindSolver, UnionFindSolver, Var} +import cfg_visualiser.{DotStruct, DotStructElement, StructArrow, StructDotGraph} import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, IntraProcIRCursor, Literal, Memory, MemoryAssign, MemoryLoad, Procedure, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, begin, computeDomain, toShortString} import specification.{ExternalFunction, SpecGlobal, SymbolTableEntry} @@ -198,12 +199,124 @@ class DSG(val proc: Procedure, */ def collectNodes = nodes.clear() - nodes.addAll(formals.values.map(_._1.node.get)) + nodes.addAll(formals.values.map(_._1.node.get).map(n => find(n).node)) varToCell.values.foreach( - value => nodes.addAll(value.values.map(_._1.node.get)) + value => nodes.addAll(value.values.map(_._1.node.get).map(n => find(n).node)) ) - nodes.addAll(stackMapping.values) - nodes.addAll(globalMapping.values.map(_._1)) + 
nodes.addAll(stackMapping.values.map(n => find(n).node)) + nodes.addAll(globalMapping.values.map(_._1).map(n => find(n).node)) + + val queue: mutable.Queue[DSN] = mutable.Queue() + queue.enqueueAll(nodes) + while queue.nonEmpty do + val cur = queue.dequeue() + cur.cells.foreach { + case (offset: BigInt, cell: DSC) if cell._pointee.isDefined => + val node = find(cell.getPointee.node).node + if !nodes.contains(node) then + nodes.add(node) + queue.enqueue(node) + case _ => + } + + def toDot: String = { + collectNodes + + + var structs = nodes.foldLeft(Set[DotStruct]()) { + (s, n) => + s + DotStruct(n.id.toString, n.toString, Some(n.cells.keys.map(o => o.toString))) + } + + structs ++= formals.foldLeft(Set[DotStruct]()) { + (s, n) => + val variable = n._1.name + s + DotStruct(s"Formal_$variable", s"Formal_$variable", None) + } + + structs ++= varToCell.foldLeft(Set[DotStruct]()) { + (s, r) => + val pos = r._1 + val mapping = r._2 + s ++ mapping.foldLeft(Set[DotStruct]()) { + (k, n) => + val variable = n._1.name + k + DotStruct(s"SSA_${pos.toShortString.slice(1, 9)}_$variable", s"SSA_${pos}_$variable", None, false) + } + } + + + structs ++= globalMapping.foldLeft(Set[DotStruct]()) { + (s, n) => + val range = n._1 + s + DotStruct(s"Global_${range.start}_${range.end}", s"Global_$range", None) + } + + structs ++= stackMapping.foldLeft(Set[DotStruct]()) { + (s, n) => + val offset = n._1 + s + DotStruct(s"Stack_$offset", s"Stack_$offset", None) + } + + var arrows = nodes.foldLeft(Set[StructArrow]()) { + (s, node) => + s ++ node.cells.foldLeft(Set[StructArrow]()) { + (k, c) => + val offset = c._1 + val cell = c._2 + if cell._pointee.isDefined then + val pointee = find(cell.getPointee) + s + StructArrow(DotStructElement(node.id.toString, Some(offset.toString)), DotStructElement(pointee.node.id.toString, Some(pointee.cell.offset.toString)), pointee.internalOffset.toString) + else + s + } + } + + + arrows ++= formals.foldLeft(Set[StructArrow]()) { + (s, n) => + val variable = n._1.name + val value = find(n._2) + s + StructArrow(DotStructElement(s"Formal_$variable", None), DotStructElement(value.node.id.toString, Some(value.cell.offset.toString)), value.internalOffset.toString) + } + + arrows ++= varToCell.foldLeft(Set[StructArrow]()) { + (s, r) => + val pos = r._1 + val mapping = r._2 + s ++ mapping.foldLeft(Set[StructArrow]()) { + (k, n) => + val variable = n._1.name + val value = find(n._2) + k + StructArrow(DotStructElement(s"SSA_${pos.toShortString.slice(1, 9)}_$variable", None), DotStructElement(value.node.id.toString, Some(value.cell.offset.toString)), value.internalOffset.toString) + } + } + + + arrows ++= globalMapping.foldLeft(Set[StructArrow]()) { + (s, n) => + val range = n._1 + val node= find(n._2.node).node + val offset = n._2.offset + find(n._2.node).offset + val cellOffset = node.getCell(offset).offset + val internalOffset = offset - cellOffset + s + StructArrow(DotStructElement(s"Global_${range.start}_${range.end}", None), DotStructElement(node.id.toString, Some(cellOffset.toString)), internalOffset.toString) + } + + arrows ++= stackMapping.foldLeft(Set[StructArrow]()) { + (s, n) => + val offset = n._1 + val node = find(n._2).node + val nodeOffset = find(n._2).offset + val cellOffset = node.getCell(nodeOffset).offset + val internalOffset = nodeOffset - cellOffset + s + StructArrow(DotStructElement(s"Stack_$offset", None), DotStructElement(node.id.toString, Some(cellOffset.toString)), internalOffset.toString) + } + + + StructDotGraph(proc.name, structs, arrows).toDotString + } + /** * 
Collapses the node causing it to lose field sensitivity diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala index 4cb3e26ee..464f16364 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/Local.scala @@ -339,13 +339,13 @@ class Local( val b = graph.solver.solution() graph.collectNodes - graph.nodes.foreach(node => - node.children.foreach( - child => - assert(graph.solver.find(child._1.term).equals(graph.solver.find(node.term))) - assert(graph.solver.find(child._1.term)._2.equals(child._2)) - - ) - ) +// graph.nodes.foreach(node => +// node.children.foreach( +// child => +// assert(graph.solver.find(child._1.term).equals(graph.solver.find(node.term))) +// assert(graph.solver.find(child._1.term)._2.equals(child._2)) +// +// ) +// ) graph } diff --git a/src/main/scala/cfg_visualiser/DotTools.scala b/src/main/scala/cfg_visualiser/DotTools.scala index 937bd8293..a57e1cc01 100644 --- a/src/main/scala/cfg_visualiser/DotTools.scala +++ b/src/main/scala/cfg_visualiser/DotTools.scala @@ -136,3 +136,68 @@ class DotGraph(val title: String, val nodes: Iterable[DotNode], val edges: Itera def toDotString: String = "digraph " + title + " {\n" + (nodes ++ edges).foldLeft("")((str, elm) => str + elm.toDotString + "\n") + "}" } + + + +class DotStruct(val id: String, val details: String, val fields: Option[Iterable[String]], val verbose: Boolean = false) extends DotElement { + def equals(other: DotStruct): Boolean = toDotString.equals(other.toDotString) + + + val label = s"\"{<$id> ${if verbose then details else id} ${if fields.isDefined then s" | {${fields.get.map(f => s"<$f> $f").mkString("|")}}" else "" }}\"" + override def toString: String = toDotString + + override def toDotString: String = + s"$id " + "[label=" + label + "]" +} + +class DotStructElement(val id: String, val field: Option[String]) extends DotElement { + def equals(other: DotStruct): Boolean = toDotString.equals(other.toDotString) + override def toString: String = toDotString + + override def toDotString: String = + s"$id${if field.isDefined then ":" + field.get else ""}" +} + +case class StructArrow( + from: DotStructElement, + to: DotStructElement, + label: String = "", + arrow: String = "->", + style: String = "solid", + colour: String = "black") extends DotElement { + + def equals(other: DotArrow): Boolean = toDotString.equals(other.toDotString) + + def toDotString: String = + s"${from.toString} $arrow ${to.toString} [label=\"$label\", style=\"$style\", color=\"$colour\"]" +} + + +/** Represents a Graphviz dot graph. 
+ */ +class StructDotGraph(val title: String, val nodes: Iterable[DotStruct], val edges: Iterable[StructArrow]) extends DotElement { + + def this(nodes: List[DotStruct], edges: List[StructArrow]) = this("", nodes, edges) + + def this(title: String) = this(title, List(), List()) + + def this() = this(List(), List()) + + def addGraph(g: StructDotGraph): StructDotGraph = { + val ng = g.nodes.foldLeft(this)((g, n) => g.addNode(n)) + g.edges.foldLeft(ng)((g, e) => g.addEdge(e)) + } + + def addNode(n: DotStruct): StructDotGraph = + if (nodes.exists(a => n.equals(a))) this + else new StructDotGraph(title, nodes ++ List(n), edges) + + def addEdge(e: StructArrow): StructDotGraph = + if (edges.exists(a => e.equals(a))) this + else new StructDotGraph(title, nodes, edges ++ List(e)) + + override def toString: String = toDotString + + def toDotString: String = "digraph " + title + " {\nrankdir=\"LR\"\nnode [shape=record];\n" + (nodes ++ edges).foldLeft("")((str, elm) => str + elm.toDotString + "\n") + "}" +} + diff --git a/src/test/scala/LocalTest.scala b/src/test/scala/LocalTest.scala index 68c590024..b232745bf 100644 --- a/src/test/scala/LocalTest.scala +++ b/src/test/scala/LocalTest.scala @@ -824,7 +824,7 @@ class LocalTest extends AnyFunSuite, TestUtil { assert(dsg.find(dsg.stackMapping(40).cells(0)).getPointee.internalOffset == 0) assert(dsg.find(dsg.stackMapping(32).cells(0)).getPointee.internalOffset == 0) assert(dsg.find(dsg.stackMapping(24).cells(0)).getPointee.internalOffset == 0) - + } } From 6a74737b22c6d885288ca640d7182dfddbd98346 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Thu, 5 Sep 2024 12:59:36 +1000 Subject: [PATCH 044/104] Added sub-accesses to approximate stack region sizes --- .../InterprocSteensgaardAnalysis.scala | 6 +- src/main/scala/analysis/MemoryModelMap.scala | 81 ++++++++++++++----- .../scala/analysis/MemoryRegionAnalysis.scala | 47 ++++++----- 3 files changed, 89 insertions(+), 45 deletions(-) diff --git a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala index 271ff6b86..268fba7e8 100644 --- a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala +++ b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala @@ -153,10 +153,8 @@ class InterprocSteensgaardAnalysis( } { r match { case stackRegion: StackRegion => - val nextOffset = BinaryExpr(binExpr.op, BitVecLiteral(stackRegion.start, 64), b) - evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => - reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, b2), n) - } + val nextOffset = bitVectorOpToBigIntOp(binExpr.op, stackRegion.start, b.value) + reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, BitVecLiteral(nextOffset, 64)), n) case dataRegion: DataRegion => Logger.debug(s"Hey, I'm a data region: $dataRegion") Logger.debug(s"Hey, I'm a offset: $b") diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index 620fff93c..44fb4f2aa 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -8,25 +8,27 @@ import scala.collection.mutable // Define a case class to represent a range case class RangeKey(start: BigInt, end: BigInt) extends Ordered[RangeKey]: - val size: BigInt = end - start + val size: BigInt = end - start + 1 override def compare(that: RangeKey): Int = { if (start < that.start) -1 else if (start > that.start) 1 else 0 } - override def toString: String = 
s"Range[$start, $end]" + override def toString: String = s"Range[$start, $end] (size: $size)" // Custom data structure for storing range-to-object mappings class MemoryModelMap { private val MAX_BIGINT: BigInt = BigInt(Long.MaxValue) - private val contextStack = mutable.Stack.empty[List[StackRegion]] + private val contextStack = mutable.Stack.empty[String] private val sharedContextStack = mutable.Stack.empty[List[StackRegion]] private val localStacks = mutable.Map[String, List[StackRegion]]() private val sharedStacks = mutable.Map[String, List[StackRegion]]() private val stackMap: mutable.Map[RangeKey, StackRegion] = mutable.TreeMap() + private val bufferedStackMap: mutable.Map[String, mutable.Map[RangeKey, StackRegion]] = mutable.Map() private val sharedStackMap: mutable.Map[Procedure, mutable.TreeMap[RangeKey, StackRegion]] = mutable.Map[Procedure, mutable.TreeMap[RangeKey, StackRegion]]() + private val bufferedSharedStackMap: mutable.Map[String, mutable.Map[Procedure, mutable.TreeMap[RangeKey, StackRegion]]] = mutable.Map() private val heapMap: mutable.Map[RangeKey, HeapRegion] = mutable.TreeMap() private val dataMap: mutable.Map[RangeKey, DataRegion] = mutable.TreeMap() @@ -40,6 +42,21 @@ class MemoryModelMap { * otherwise to the stackMap */ def add(offset: BigInt, region: MemoryRegion, shared: Boolean = false): Unit = { + def maxSize(r: MemoryRegion): BigInt = { + r match + case DataRegion(regionIdentifier, start) => ??? + case HeapRegion(regionIdentifier, size, parent) => ??? + case StackRegion(regionIdentifier, start, parent) => + if (r.subAccesses.nonEmpty) { + val max = start + r.subAccesses.max + r.fields ++= r.subAccesses.diff(Set(max)).map(_ + start) + max + } else { + ??? + } + case _ => ??? + } + region match { case s: StackRegion => var currentStackMap = stackMap @@ -47,14 +64,21 @@ class MemoryModelMap { currentStackMap = sharedStackMap.getOrElseUpdate(s.parent, mutable.TreeMap()) } if (currentStackMap.isEmpty) { - currentStackMap(RangeKey(offset, MAX_BIGINT)) = s + currentStackMap(RangeKey(offset, maxSize(region) - 1)) = s } else { val currentMaxRange = currentStackMap.keys.maxBy(_.end) val currentMaxRegion = currentStackMap(currentMaxRange) - currentStackMap.remove(currentMaxRange) - val updatedRange = RangeKey(currentMaxRange.start, offset - 1) - currentStackMap.addOne(updatedRange -> currentMaxRegion) - currentStackMap(RangeKey(offset, MAX_BIGINT)) = s + if (offset <= currentMaxRange.end) { + currentStackMap.remove(currentMaxRange) + currentMaxRegion.fields += offset + val updatedRange = RangeKey(currentMaxRange.start, offset + maxSize(region) - 1) + currentStackMap.addOne(updatedRange -> currentMaxRegion) + for (elem <- region.fields) { + currentMaxRegion.fields += offset + elem + } + } else { + currentStackMap(RangeKey(offset, maxSize(region) - 1)) = s + } } case d: DataRegion => val currentDataMap = dataMap @@ -197,10 +221,15 @@ class MemoryModelMap { } // TODO: push and pop could be optimised by caching the results def pushContext(funName: String): Unit = { - contextStack.push(localStacks(funName)) + contextStack.push(funName) stackMap.clear() - for (stackRgn <- contextStack.top) { - add(stackRgn.start, stackRgn) + if (bufferedStackMap.contains(funName)) { + stackMap ++= bufferedStackMap(funName) + } else { + for (stackRgn <- localStacks(contextStack.top).sortBy(_.start)) { + add(stackRgn.start, stackRgn) + } + bufferedStackMap(funName) = stackMap.clone() } if (!sharedStacks.contains(funName)) { @@ -208,8 +237,13 @@ class MemoryModelMap { } 
sharedContextStack.push(sharedStacks(funName)) sharedStackMap.clear() - for (stackRgn <- sharedContextStack.top) { - add(stackRgn.start, stackRgn, true) + if (bufferedSharedStackMap.contains(funName)) { + sharedStackMap ++= bufferedSharedStackMap(funName) + } else { + for (stackRgn <- sharedContextStack.top.sortBy(_.start)) { + add(stackRgn.start, stackRgn, true) + } + bufferedSharedStackMap(funName) = sharedStackMap.clone() } } @@ -217,16 +251,24 @@ class MemoryModelMap { if (contextStack.size > 1) { contextStack.pop() stackMap.clear() - for (stackRgn <- contextStack.top) { - add(stackRgn.start, stackRgn) + if (bufferedStackMap.contains(contextStack.top)) { + stackMap ++= bufferedStackMap(contextStack.top) + } else { + for (stackRgn <- localStacks(contextStack.top)) { + add(stackRgn.start, stackRgn) + } } } if (sharedContextStack.size > 1) { sharedContextStack.pop() sharedStackMap.clear() - for (stackRgn <- sharedContextStack.top) { - add(stackRgn.start, stackRgn, true) + if (bufferedSharedStackMap.contains(contextStack.top)) { + sharedStackMap ++= bufferedSharedStackMap(contextStack.top) + } else { + for (stackRgn <- sharedContextStack.top) { + add(stackRgn.start, stackRgn, true) + } } } } @@ -435,11 +477,12 @@ class MemoryModelMap { trait MemoryRegion { val regionIdentifier: String - val subRegions: mutable.Set[MemoryRegion] = mutable.Set() + val subAccesses: mutable.Set[BigInt] = mutable.Set() + val fields: mutable.Set[BigInt] = mutable.Set() } case class StackRegion(override val regionIdentifier: String, start: BigInt, parent: Procedure) extends MemoryRegion { - override def toString: String = s"Stack($regionIdentifier, $start, ${parent.name})" + override def toString: String = s"Stack($regionIdentifier, $start, ${parent.name}, $subAccesses)" } case class HeapRegion(override val regionIdentifier: String, size: BigInt, parent: Procedure) extends MemoryRegion { diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index 9e064648b..88a1f9fef 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -40,15 +40,18 @@ trait MemoryRegionAnalysis(val program: Program, * @param parent : the function entry node * @return the stack region corresponding to the offset */ - private def poolMaster(expr: BigInt, stackBase: Procedure): StackRegion = { + private def poolMaster(expr: BigInt, stackBase: Procedure, subAccess: BigInt): StackRegion = { val stackPool = stackMap.getOrElseUpdate(stackBase, mutable.HashMap()) + var region: StackRegion = null if (stackPool.contains(expr)) { - stackPool(expr) + region = stackPool(expr) } else { val newRegion = StackRegion(nextStackCount(), expr, stackBase) stackPool += (expr -> newRegion) - newRegion + region = newRegion } + region.subAccesses.add(subAccess/8) + region } private def stackDetection(stmt: Statement): Unit = { @@ -92,7 +95,7 @@ trait MemoryRegionAnalysis(val program: Program, private val registerToRegions: mutable.Map[RegisterVariableWrapper, mutable.Set[MemoryRegion]] = mutable.Map() val procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]] = mutable.Map() - def reducibleToRegion(binExpr: BinaryExpr, n: Command): Set[MemoryRegion] = { + def reducibleToRegion(binExpr: BinaryExpr, n: Command, subAccess: BigInt): Set[MemoryRegion] = { var reducedRegions = Set.empty[MemoryRegion] binExpr.arg1 match { case variable: Variable if !spList.contains(variable) => @@ -100,36 +103,36 @@ trait 
MemoryRegionAnalysis(val program: Program, for (i <- ctx) { val regions = i.rhs match { case memoryLoad: MemoryLoad => - eval(memoryLoad.index, Set.empty, i) + eval(memoryLoad.index, Set.empty, i, memoryLoad.size) case _: BitVecLiteral => Set.empty case _ => - eval(i.rhs, Set.empty, i) + eval(i.rhs, Set.empty, i, -1) // TODO: is the subAccess correct here? } evaluateExpression(binExpr.arg2, constantProp(n)) match { case Some(b: BitVecLiteral) => regions.foreach { case stackRegion: StackRegion => val nextOffset = bitVectorOpToBigIntOp(binExpr.op, stackRegion.start, b.value) - reducedRegions = reducedRegions + poolMaster(nextOffset, IRWalk.procedure(n)) + reducedRegions = reducedRegions + poolMaster(nextOffset, IRWalk.procedure(n), subAccess) case _ => } case None => } } case _ => - eval(binExpr, Set.empty, n) + eval(binExpr, Set.empty, n, subAccess) } reducedRegions } - def reducibleVariable(variable: Variable, n: Command): Set[MemoryRegion] = { + def reducibleVariable(variable: Variable, n: Command, subAccess: BigInt): Set[MemoryRegion] = { var regions = Set.empty[MemoryRegion] val ctx = getDefinition(variable, n, reachingDefs) for (i <- ctx) { i.rhs match { case binaryExpr: BinaryExpr => - regions = regions ++ reducibleToRegion(binaryExpr, i) + regions = regions ++ reducibleToRegion(binaryExpr, i, subAccess) case _ => //regions = regions ++ eval(i.rhs, Set.empty, i) } @@ -137,7 +140,7 @@ trait MemoryRegionAnalysis(val program: Program, regions } - def eval(exp: Expr, env: Set[MemoryRegion], n: Command): Set[MemoryRegion] = { + def eval(exp: Expr, env: Set[MemoryRegion], n: Command, subAccess: BigInt): Set[MemoryRegion] = { Logger.debug(s"evaluating $exp") Logger.debug(s"env: $env") Logger.debug(s"n: $n") @@ -147,35 +150,35 @@ trait MemoryRegionAnalysis(val program: Program, evaluateExpression(binOp.arg2, constantProp(n)) match { case Some(b: BitVecLiteral) => val negB = if isNegative(b) then -b.value else b.value - Set(poolMaster(negB, IRWalk.procedure(n))) + Set(poolMaster(negB, IRWalk.procedure(n), subAccess)) case None => env } - } else if (reducibleToRegion(binOp, n).nonEmpty) { - reducibleToRegion(binOp, n) + } else if (reducibleToRegion(binOp, n, subAccess).nonEmpty) { + reducibleToRegion(binOp, n, subAccess) } else { evaluateExpression(binOp, constantProp(n)) match { - case Some(b: BitVecLiteral) => eval(b, env, n) + case Some(b: BitVecLiteral) => eval(b, env, n, subAccess) case None => env } } case variable: Variable => variable match { case reg: Register if spList.contains(reg) => // TODO: this is a hack - eval(BitVecLiteral(0, 64), env, n) + eval(BitVecLiteral(0, 64), env, n, subAccess) case _ => evaluateExpression(variable, constantProp(n)) match { case Some(b: BitVecLiteral) => - eval(b, env, n) + eval(b, env, n, subAccess) case _ => - reducibleVariable(variable, n) + reducibleVariable(variable, n, subAccess) } } case memoryLoad: MemoryLoad => - eval(memoryLoad.index, env, n) + eval(memoryLoad.index, env, n, memoryLoad.size) // ignore case where it could be a global region (loaded later in MMM from relf) case b: BitVecLiteral => val negB = if isNegative(b) then -b.value else b.value - Set(poolMaster(negB, IRWalk.procedure(n))) + Set(poolMaster(negB, IRWalk.procedure(n), subAccess)) // we cannot evaluate this to a concrete value, we need VSA for this case _ => Logger.debug(s"type: ${exp.getClass} $exp\n") @@ -217,14 +220,14 @@ trait MemoryRegionAnalysis(val program: Program, s } case memAssign: MemoryAssign => - val result = eval(memAssign.index, s, cmd) + val result = 
eval(memAssign.index, s, cmd, memAssign.size) regionLattice.lub(s, result) case assign: Assign => stackDetection(assign) var m = s unwrapExpr(assign.rhs).foreach { case memoryLoad: MemoryLoad => - val result = eval(memoryLoad.index, s, cmd) + val result = eval(memoryLoad.index, s, cmd, memoryLoad.size) m = regionLattice.lub(m, result) case _ => m } From 0e0a98c8a817f4059b613483789dda92113e197d Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Thu, 5 Sep 2024 13:47:13 +1000 Subject: [PATCH 045/104] Corrected negative addresses --- src/main/scala/analysis/MemoryRegionAnalysis.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index 88a1f9fef..d55b41529 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -149,7 +149,7 @@ trait MemoryRegionAnalysis(val program: Program, if (spList.contains(binOp.arg1)) { evaluateExpression(binOp.arg2, constantProp(n)) match { case Some(b: BitVecLiteral) => - val negB = if isNegative(b) then -b.value else b.value + val negB = if isNegative(b) then b.value - BigInt(2).pow(b.size) else b.value Set(poolMaster(negB, IRWalk.procedure(n), subAccess)) case None => env } @@ -163,7 +163,7 @@ trait MemoryRegionAnalysis(val program: Program, } case variable: Variable => variable match { - case reg: Register if spList.contains(reg) => // TODO: this is a hack + case reg: Register if spList.contains(reg) => // TODO: this is a hack because spList is not comprehensive it needs to be a standalone analysis eval(BitVecLiteral(0, 64), env, n, subAccess) case _ => evaluateExpression(variable, constantProp(n)) match { @@ -177,7 +177,7 @@ trait MemoryRegionAnalysis(val program: Program, eval(memoryLoad.index, env, n, memoryLoad.size) // ignore case where it could be a global region (loaded later in MMM from relf) case b: BitVecLiteral => - val negB = if isNegative(b) then -b.value else b.value + val negB = if isNegative(b) then b.value - BigInt(2).pow(b.size) else b.value Set(poolMaster(negB, IRWalk.procedure(n), subAccess)) // we cannot evaluate this to a concrete value, we need VSA for this case _ => @@ -212,7 +212,7 @@ trait MemoryRegionAnalysis(val program: Program, if (directCall.target.name == "malloc") { evaluateExpression(mallocVariable, constantProp(n)) match { case Some(b: BitVecLiteral) => - val negB = if isNegative(b) then -b.value else b.value + val negB = if isNegative(b) then b.value - BigInt(2).pow(b.size) else b.value regionLattice.lub(s, Set(HeapRegion(nextMallocCount(), negB, IRWalk.procedure(n)))) case None => s } From 250fdee28ffc7c7b022f5b3a3dfc868efad9e3fc Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Mon, 9 Sep 2024 09:51:28 +1000 Subject: [PATCH 046/104] Added sizes to global regions --- src/main/scala/analysis/MemoryModelMap.scala | 8 ++++---- src/main/scala/analysis/MemoryRegionAnalysis.scala | 2 +- src/main/scala/util/RunUtils.scala | 3 ++- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index 44fb4f2aa..36bf4cd09 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -44,7 +44,7 @@ class MemoryModelMap { def add(offset: BigInt, region: MemoryRegion, shared: Boolean = false): Unit = { def maxSize(r: MemoryRegion): BigInt = { r match - case DataRegion(regionIdentifier, start) => ??? 
+ case DataRegion(regionIdentifier, start, size) => start + size case HeapRegion(regionIdentifier, size, parent) => ??? case StackRegion(regionIdentifier, start, parent) => if (r.subAccesses.nonEmpty) { @@ -173,12 +173,12 @@ class MemoryModelMap { procedureToRegions } - def convertMemoryRegions(memoryRegions: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]], externalFunctions: Map[BigInt, String], globalOffsets: Map[BigInt, BigInt], globalAddresses: Map[BigInt, String], procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]]): Unit = { + def convertMemoryRegions(memoryRegions: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]], externalFunctions: Map[BigInt, String], globalOffsets: Map[BigInt, BigInt], globalAddresses: Map[BigInt, String], globalSizes: Map[String, Int], procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]]): Unit = { // map externalFunctions name, value to DataRegion(name, value) and then sort by value val reversedExternalFunctionRgns = (externalFunctions ++ globalAddresses).map((offset, name) => resolveInverseGlobalOffset(name, offset, globalOffsets) -> name) val filteredGlobalOffsets = globalAddresses.filterNot((offset, name) => reversedExternalFunctionRgns.contains(offset)) - val externalFunctionRgns = (reversedExternalFunctionRgns ++ filteredGlobalOffsets).map((offset, name) => DataRegion(name, offset)) + val externalFunctionRgns = (reversedExternalFunctionRgns ++ filteredGlobalOffsets).map((offset, name) => DataRegion(name, offset, globalSizes.getOrElse(name, 0))) // we should collect all data regions otherwise the ordering might be wrong var dataRgns: Set[DataRegion] = Set.empty @@ -489,7 +489,7 @@ case class HeapRegion(override val regionIdentifier: String, size: BigInt, paren override def toString: String = s"Heap($regionIdentifier, $size)" } -case class DataRegion(override val regionIdentifier: String, start: BigInt) extends MemoryRegion { +case class DataRegion(override val regionIdentifier: String, start: BigInt, size: BigInt) extends MemoryRegion { override def toString: String = s"Data($regionIdentifier, $start)" } diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index d55b41529..8fcc84d86 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -50,7 +50,7 @@ trait MemoryRegionAnalysis(val program: Program, stackPool += (expr -> newRegion) region = newRegion } - region.subAccesses.add(subAccess/8) + region.subAccesses.add((subAccess.toDouble/8).ceil.toInt) region } diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 11d96afa0..12a12211f 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -576,6 +576,7 @@ object StaticAnalysis { .map(p => BigInt(p.address.get) -> p.name) .toMap val globalAddresses = globals.map(s => s.address -> s.name).toMap + val globalSizes = globals.map(s => s.name -> s.size).toMap val externalAddresses = externalFunctions.map(e => e.offset -> e.name).toMap Logger.info("Globals:") Logger.info(globalAddresses) @@ -678,7 +679,7 @@ object StaticAnalysis { Logger.info("[!] 
Running MMM") val mmm = MemoryModelMap() - mmm.convertMemoryRegions(mraResult, mergedSubroutines, globalOffsets, globalAddresses, mraSolver.procedureToSharedRegions) + mmm.convertMemoryRegions(mraResult, mergedSubroutines, globalOffsets, globalAddresses, globalSizes, mraSolver.procedureToSharedRegions) mmm.logRegions() Logger.info("[!] Injecting regions") From a873a0645fcdaf8923914ed09765c5b886f4fccc Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Mon, 9 Sep 2024 10:37:58 +1000 Subject: [PATCH 047/104] A fix for global accesses --- src/main/scala/analysis/MemoryModelMap.scala | 14 ++++++++------ src/main/scala/util/RunUtils.scala | 5 +++-- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index 36bf4cd09..bc08a6280 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -83,14 +83,16 @@ class MemoryModelMap { case d: DataRegion => val currentDataMap = dataMap if (currentDataMap.isEmpty) { - currentDataMap(RangeKey(offset, MAX_BIGINT)) = d + currentDataMap(RangeKey(offset, maxSize(d) - 1)) = d } else { val currentMaxRange = currentDataMap.keys.maxBy(_.end) val currentMaxRegion = currentDataMap(currentMaxRange) - currentDataMap.remove(currentMaxRange) - val updatedRange = RangeKey(currentMaxRange.start, offset - 1) - currentDataMap.addOne(updatedRange -> currentMaxRegion) - currentDataMap(RangeKey(offset, MAX_BIGINT)) = d + if (offset <= currentMaxRange.end) { + currentDataMap.remove(currentMaxRange) // TODO: this removes previously overlapping parent region (jumptable2 example) which favours more fine grained regions + currentDataMap(RangeKey(offset, maxSize(d) - 1)) = d + } else { + currentDataMap(RangeKey(offset, maxSize(d) - 1)) = d + } } case h: HeapRegion => val currentHeapMap = heapMap @@ -178,7 +180,7 @@ class MemoryModelMap { val reversedExternalFunctionRgns = (externalFunctions ++ globalAddresses).map((offset, name) => resolveInverseGlobalOffset(name, offset, globalOffsets) -> name) val filteredGlobalOffsets = globalAddresses.filterNot((offset, name) => reversedExternalFunctionRgns.contains(offset)) - val externalFunctionRgns = (reversedExternalFunctionRgns ++ filteredGlobalOffsets).map((offset, name) => DataRegion(name, offset, globalSizes.getOrElse(name, 0))) + val externalFunctionRgns = (reversedExternalFunctionRgns ++ filteredGlobalOffsets).map((offset, name) => DataRegion(name, offset, (globalSizes.getOrElse(name, 1).toDouble/8).ceil.toInt)) // we should collect all data regions otherwise the ordering might be wrong var dataRgns: Set[DataRegion] = Set.empty diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 12a12211f..55067b55e 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -578,16 +578,17 @@ object StaticAnalysis { val globalAddresses = globals.map(s => s.address -> s.name).toMap val globalSizes = globals.map(s => s.name -> s.size).toMap val externalAddresses = externalFunctions.map(e => e.offset -> e.name).toMap - Logger.info("Globals:") + Logger.info("Globals: ") Logger.info(globalAddresses) Logger.info("Global Offsets: ") Logger.info(globalOffsets) + Logger.info("Global Sizes: ") + Logger.info(globalSizes) Logger.info("External: ") Logger.info(externalAddresses) Logger.info("Subroutine Addresses:") Logger.info(subroutines) - // reducible loops val detector = LoopDetector(IRProgram) val foundLoops = detector.identify_loops() From 
7f0ee33115735c3a75e6b6b5583fe30f3ec6422c Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 9 Sep 2024 14:23:06 +1000 Subject: [PATCH 048/104] fixes --- src/main/scala/analysis/DSAUtility.scala | 11 ++++++ src/main/scala/cfg_visualiser/DotTools.scala | 4 +- .../scala/{LocalTest.scala => DSATest.scala} | 39 +++++++++++++++---- 3 files changed, 45 insertions(+), 9 deletions(-) rename src/test/scala/{LocalTest.scala => DSATest.scala} (97%) diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index 550c20631..1253a1d0a 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -418,11 +418,22 @@ class DSG(val proc: Procedure, // private val parent = mutable.Map[DSC, DSC]() val solver: DSAUnionFindSolver[UniTerm] = DSAUnionFindSolver() + /** + * wrapper for find functionality of the union-find + * @param node the node to perform find on + * @return a field which is the tuple (parent node of the input node, starting offset of the input node in its parent) + */ def find(node: DSN) : Field = val (n, offset) = solver.find(node.term) val resultNode = n.asInstanceOf[Derm].node Field(resultNode, offset) + /** + * wrapper for find functionality of the union-find + * + * @param cell the cell to perform find on + * @return the input cell's equivalent cell in the parent + */ def find(cell: DSC) : DSC = val node = cell.node.get val offset = cell.offset diff --git a/src/main/scala/cfg_visualiser/DotTools.scala b/src/main/scala/cfg_visualiser/DotTools.scala index a57e1cc01..d89cf8333 100644 --- a/src/main/scala/cfg_visualiser/DotTools.scala +++ b/src/main/scala/cfg_visualiser/DotTools.scala @@ -139,11 +139,11 @@ class DotGraph(val title: String, val nodes: Iterable[DotNode], val edges: Itera -class DotStruct(val id: String, val details: String, val fields: Option[Iterable[String]], val verbose: Boolean = false) extends DotElement { +class DotStruct(val id: String, val details: String, val fields: Option[Iterable[String]], val verbose: Boolean = true) extends DotElement { def equals(other: DotStruct): Boolean = toDotString.equals(other.toDotString) - val label = s"\"{<$id> ${if verbose then details else id} ${if fields.isDefined then s" | {${fields.get.map(f => s"<$f> $f").mkString("|")}}" else "" }}\"" + val label = s"\"{<$id> ${if verbose then wrap(details, 80) else id} ${if fields.isDefined then s" | {${fields.get.map(f => s"<$f> $f").mkString("|")}}" else "" }}\"" override def toString: String = toDotString override def toDotString: String = diff --git a/src/test/scala/LocalTest.scala b/src/test/scala/DSATest.scala similarity index 97% rename from src/test/scala/LocalTest.scala rename to src/test/scala/DSATest.scala index b232745bf..9222da611 100644 --- a/src/test/scala/LocalTest.scala +++ b/src/test/scala/DSATest.scala @@ -7,7 +7,19 @@ import ir.dsl.* import specification.Specification import util.{BASILConfig, BoogieGeneratorConfig, ILLoadingConfig, IRContext, RunUtils, StaticAnalysisConfig} -class LocalTest extends AnyFunSuite, TestUtil { +/** + * This is the test suite for testing DSA functionality + * The tests follow a general pattern of running BASIL analyses on a test program + * and then asserting properties about the Data Structure Graph (DSG) of the function produced at + * different levels + * + * DSA has three phases. 
+ * BASILRESULT.analysis.get.local is the set of graphs from the end of the local phase + * BASILRESULT.analysis.get.bu is the set of graphs from the end of the bottom-up phase + * BASILRESULT.analysis.get.td is the set of graphs from the end of the top-down phase + * + */ +class DSATest extends AnyFunSuite, TestUtil { // Local DSA tests test("basic pointer") { @@ -25,22 +37,31 @@ class LocalTest extends AnyFunSuite, TestUtil { ) ) val program = results.ir.program + + // the dsg of the main procedure after the local phase val dsg = results.analysis.get.locals.get(program.mainProcedure) -// assert(dsg.pointTo.size == 12) // 12 - val framePointer = dsg.adjust(dsg.find(dsg.stackMapping(0).cells(0)).getPointee) + + + // dsg.formals(R29) is the slice representing formal R29 val R29formal = dsg.adjust(dsg.formals(R29)) - assert(framePointer.equals(R29formal)) + + + // cells representing the stack at various offsets + val stack0 = dsg.find(dsg.stackMapping(0).cells(0)) // R31 val stack8 = dsg.find(dsg.stackMapping(8).cells(0)) // R31 + 8 - assert(dsg.adjust(stack8.getPointee).equals(dsg.adjust(dsg.formals(R30)))) val stack40 = dsg.find(dsg.stackMapping(40).cells(0))// R31 + 40 val stack32 = dsg.find(dsg.stackMapping(32).cells(0)) // R31 + 32 val stack24 = dsg.find(dsg.stackMapping(24).cells(0)) // R31 + 24 and Malloc - assert(dsg.adjust(stack32.getPointee).equals(stack24)) - assert(stack24.node.get.collapsed) + + assert(dsg.adjust(stack0.getPointee).equals(R29formal)) // R31 points to the frame pointer + assert(dsg.adjust(stack8.getPointee).equals(dsg.adjust(dsg.formals(R30)))) // R31 + 8 points to the link register + assert(dsg.adjust(stack32.getPointee).equals(stack24)) // + assert(stack24.node.get.collapsed) // stack24 is collapsed assert(dsg.adjust(stack24.getPointee).equals(stack24)) assert(dsg.find(dsg.adjust(stack40.getPointee)).equals(dsg.find(dsg.adjust(dsg.find(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69600))._1.cells(0).getPointee)).getPointee)))) + writeToFile(dsg.toDot, "test1.dot") } test("local jumptable2 sub_seven") { @@ -198,8 +219,12 @@ class LocalTest extends AnyFunSuite, TestUtil { outputPrefix = "boogie_out", ) ) + + val program = results.ir.program val dsg = results.analysis.get.locals.get(program.mainProcedure) + writeToFile(dsg.toDot, "test2.dot") + val stack0 = dsg.adjust(dsg.find(dsg.stackMapping(0).cells(0)).getPointee) val stack8 = dsg.adjust(dsg.find(dsg.stackMapping(8).cells(0)).getPointee) val stack24 = dsg.adjust(dsg.find(dsg.stackMapping(24).cells(0)).getPointee) From 4dc7239c98340e7356cc3134bb40f36259dd7b31 Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 9 Sep 2024 17:47:50 +1000 Subject: [PATCH 049/104] documented tests --- src/main/scala/analysis/DSAUtility.scala | 3 - src/test/scala/DSATest.scala | 404 +++++++---------------- 2 files changed, 128 insertions(+), 279 deletions(-) diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index 1253a1d0a..940250eba 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -6,7 +6,6 @@ import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, Di import specification.{ExternalFunction, SpecGlobal, SymbolTableEntry} import scala.util.control.Breaks.{break, breakable} -import java.math.BigInteger import scala.collection.mutable object NodeCounter { @@ -42,8 +41,6 @@ class DSG(val proc: Procedure, // DSNodes owned by this graph, only updated once analysis is done, val nodes: mutable.Set[DSN] = 
mutable.Set() - // this is mapping of point-relations in the graph -// val pointTo: mutable.Map[DSC, Slice] = mutable.Map() // represent callees in proc val callsites: mutable.Set[CallSite] = mutable.Set() diff --git a/src/test/scala/DSATest.scala b/src/test/scala/DSATest.scala index 9222da611..efa08b2ec 100644 --- a/src/test/scala/DSATest.scala +++ b/src/test/scala/DSATest.scala @@ -56,81 +56,40 @@ class DSATest extends AnyFunSuite, TestUtil { assert(dsg.adjust(stack0.getPointee).equals(R29formal)) // R31 points to the frame pointer assert(dsg.adjust(stack8.getPointee).equals(dsg.adjust(dsg.formals(R30)))) // R31 + 8 points to the link register - assert(dsg.adjust(stack32.getPointee).equals(stack24)) // - assert(stack24.node.get.collapsed) // stack24 is collapsed - assert(dsg.adjust(stack24.getPointee).equals(stack24)) - assert(dsg.find(dsg.adjust(stack40.getPointee)).equals(dsg.find(dsg.adjust(dsg.find(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69600))._1.cells(0).getPointee)).getPointee)))) - writeToFile(dsg.toDot, "test1.dot") - } - test("local jumptable2 sub_seven") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - val program = results.ir.program - val dsg = results.analysis.get.locals.get(program.procs("sub_seven")) -// assert(dsg.pointTo.size == 9) - assert(dsg.stackMapping.isEmpty) - assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.get.collapsed) + assert(dsg.adjust(stack32.getPointee).equals(stack24)) // R31 + 32 points to R31 + 24, 00000457 + assert(stack24.node.get.collapsed) // 00000497 collapses stack24 concatenation is currently unhandled, any objects referenced in an unhandled operation are collapsed + assert(dsg.adjust(stack24.getPointee).equals(stack24)) // 00000466, R31 + 32 and R31 + 24 pointees are merged - // initial global mappings - assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) - 
assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) + // __stack_chk_guard's pointee is also pointed to by stack40 + assert(dsg.find(dsg.adjust(stack40.getPointee)).equals(dsg.find(dsg.adjust(dsg.find(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69600))._1.cells(0).getPointee)).getPointee)))) } - test("local jumptable2 add_six") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - val program = results.ir.program - val dsg = results.analysis.get.locals.get(program.procs("add_six")) -// assert(dsg.pointTo.size == 9) - assert(dsg.stackMapping.isEmpty) - assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.get.collapsed) + // this function asserts universal properties about global objects in Jumptable2 example + def assertJumptable2Globals(dsg: DSG) : Unit = { + // global mappings - // initial global mappings + // __libc_csu_init relocation assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) + // __lib_csu_fini relocation + assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) + // jumptable relocation assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + // add_two relocation + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) + // add_six relocation assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) + // sub_seven relocation assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) + // main relocation + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + // x relocation assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 
4))._1.cells(0))))
+  }

-  }

-  test("local jumptable2 add_two") {
+  test("local jumptable2 callees") {
     val results = RunUtils.loadAndTranslate(
       BASILConfig(
         loading = ILLoadingConfig(
           inputFile = "examples/jumptable2/jumptable2.adt",
           relfFile = "examples/jumptable2/jumptable2.relf",
           specFile = None,
           dumpIL = None,
         ),
         staticAnalysis = Some(StaticAnalysisConfig()),
         boogieTranslation = BoogieGeneratorConfig(),
         outputPrefix = "boogie_out",
       )
     )
-    val program = results.ir.program
-    val dsg = results.analysis.get.locals.get(program.procs("add_two"))
-//    assert(dsg.pointTo.size == 9)
-    assert(dsg.stackMapping.isEmpty)
-    assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.get.collapsed)
-    // initial global mappings
-    assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0))))
-    assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0))))
-    assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0))))
-    assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))))
-    assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0))))
-    assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0))))
-    assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0))))
-    assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0))))
-    assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0))))
+
+    val program = results.ir.program
+    // test that all three callees have the same local graph
+    val callees = Set("sub_seven", "add_two", "add_six")
+    callees.foreach(
+      callee =>
+        val dsg = results.analysis.get.locals.get(program.procs(callee))
+        assert(dsg.stackMapping.isEmpty) // the stack is not used in any of the callees
+        assertJumptable2Globals(dsg) // globals should be the same everywhere; they are unused in the callees
+        // x should point to a collapsed object, in all 3 functions
+        // all three load the value of x
+        // the analysis doesn't know whether x is a pointer or not, therefore it assumes it is, for soundness
+        // an arbitrary pointer used in arithmetic causes a collapse
+        assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.get.collapsed)
+    )
   }
@@ -181,26 +140,20 @@ class DSATest extends AnyFunSuite, TestUtil {
     val program = results.ir.program
     val dsg = results.analysis.get.locals.get(program.mainProcedure)
-//    assert(dsg.pointTo.size == 12) // 12
-    val framePointer = dsg.find(dsg.stackMapping(0).cells(0))
+    val stack0 = dsg.find(dsg.stackMapping(0).cells(0))
     val stack8 = dsg.find(dsg.stackMapping(8).cells(0))
     val stack16 = dsg.find(dsg.stackMapping(16).cells(0))
     val stack28 = dsg.find(dsg.stackMapping(28).cells(0))
-    assert(dsg.adjust(framePointer.getPointee).equals(dsg.adjust(dsg.formals(R29))))
+
assert(dsg.adjust(stack0.getPointee).equals(dsg.adjust(dsg.formals(R29)))) assert(dsg.adjust(stack8.getPointee).equals(dsg.adjust(dsg.formals(R30)))) - assert(dsg.adjust(stack16.getPointee).equals(dsg.adjust(dsg.formals(R1)))) - assert(dsg.adjust(stack28.getPointee).equals(dsg.adjust(dsg.formals(R0)))) + assert(dsg.adjust(stack16.getPointee).equals(dsg.adjust(dsg.formals(R1)))) // input args + assert(dsg.adjust(stack28.getPointee).equals(dsg.adjust(dsg.formals(R0)))) // input args // initial global mappings - assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) + assertJumptable2Globals(dsg) + + // x should not be collapsed in the main function's local graph + assert(!dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)).getPointee.node.collapsed) } @@ -223,8 +176,8 @@ class DSATest extends AnyFunSuite, TestUtil { val program = results.ir.program val dsg = results.analysis.get.locals.get(program.mainProcedure) - writeToFile(dsg.toDot, "test2.dot") + // stackX is the pointee of stack object at position X instead of the stack object itself val stack0 = dsg.adjust(dsg.find(dsg.stackMapping(0).cells(0)).getPointee) val stack8 = dsg.adjust(dsg.find(dsg.stackMapping(8).cells(0)).getPointee) val stack24 = dsg.adjust(dsg.find(dsg.stackMapping(24).cells(0)).getPointee) @@ -232,23 +185,38 @@ class DSATest extends AnyFunSuite, TestUtil { val stack40 = dsg.adjust(dsg.find(dsg.stackMapping(40).cells(0)).getPointee) val stack48 = dsg.adjust(dsg.find(dsg.stackMapping(48).cells(0)).getPointee) val stack56 = dsg.adjust(dsg.find(dsg.stackMapping(56).cells(0)).getPointee) -// assert(dsg.pointTo.size==10) + + assert(stack0.equals(dsg.adjust(dsg.formals(R29)))) assert(stack8.equals(dsg.adjust(dsg.formals(R30)))) + + + + // stack24 and stack32 should point to the beginning of first Malloc (size 20) assert(stack24.equals(stack32)) assert(stack24.offset == 0) assert(stack24.node.get.allocationRegions.size == 1) assert(stack24.node.get.allocationRegions.head.asInstanceOf[HeapLocation].size == 20) - 
assert(stack40.node.get.allocationRegions.size == 1) - assert(stack48.node.get.allocationRegions.head.asInstanceOf[HeapLocation].size == 8) - assert(dsg.adjust(stack48.getPointee).equals(stack40)) - assert(dsg.adjust(stack48.getPointee).equals(stack56)) + + + // stack24 and stack40 should be pointing to the same cell at different internal offsets val unadjustedStack24Pointee = dsg.find(dsg.stackMapping(24).cells(0)).getPointee val unadjustedStack40Pointee = dsg.find(dsg.stackMapping(40).cells(0)).getPointee assert(unadjustedStack24Pointee.cell.equals(unadjustedStack40Pointee.cell)) - assert(unadjustedStack40Pointee.internalOffset == 1) + assert(unadjustedStack40Pointee.internalOffset == 1) // result of unsafe pointer arithmetic assert(unadjustedStack24Pointee.internalOffset == 0) assert(unadjustedStack24Pointee.offset == 0) + + // stack48 points to second malloc (size 8) + assert(stack48.node.get.allocationRegions.size == 1) + assert(stack48.node.get.allocationRegions.head.asInstanceOf[HeapLocation].size == 8) + + + // stack 48 points to a malloc address which point to the pointee of stack40 and stack56 + assert(dsg.adjust(stack48.getPointee).equals(stack40)) + assert(dsg.adjust(stack48.getPointee).equals(stack56)) + + } test("interproc pointer arithmetic main") { @@ -275,9 +243,13 @@ class DSATest extends AnyFunSuite, TestUtil { assert(stack0.equals(dsg.adjust(dsg.formals(R29)))) assert(stack8.equals(dsg.adjust(dsg.formals(R30)))) + + // stack24 and 32 point to different offsets of the same node assert(stack24.node.get.equals(stack32.node.get)) assert(stack24.offset == 0) assert(stack32.offset == 16) + + // stack40 points to a different offset of stack24's node but the analysis can't determine that in the local phase assert(stack40._pointee.isDefined) assert(!stack40.node.get.equals(stack24.node.get)) } @@ -301,22 +273,24 @@ class DSATest extends AnyFunSuite, TestUtil { val stack8 = dsg.adjust(dsg.find(dsg.stackMapping(8).cells(0)).getPointee) val stack24 = dsg.adjust(dsg.find(dsg.stackMapping(24).cells(0)).getPointee) + // stack8 points to the formal argument object assert(stack8.equals(dsg.adjust(dsg.formals(R0)))) assert(stack8.offset == 0) + // stack 24 points to the formal argument object at offset 16, instead assert(stack24.equals(dsg.adjust(dsg.formals(R0)).node.get.cells(16))) } test("internal merge") { + // this is an internal merge (two cells of the same node overlap and are merged together) val mem = SharedMemory("mem", 10000, 10000) val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) var program = prog( proc("main", block("operations", -// Assign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), - locAssign1, - locAssign2, + locAssign1, // R6 = R0 + 4 + locAssign2, // R7 = R0 + 5 MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), ret @@ -326,13 +300,18 @@ class DSATest extends AnyFunSuite, TestUtil { val returnUnifier = ConvertToSingleProcedureReturn() program = returnUnifier.visitProgram(program) - val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) val dsg: DSG = results.locals.get(program.mainProcedure) + + // object of formals R1 and R2 are written to overlapping fields of the same node? 
causing them to be merged together
     assert(dsg.adjust(dsg.formals(R1)).equals(dsg.adjust(dsg.formals(R2))))
+
+    // R6 and R7 address the same cell
     assert(dsg.find(dsg.varToCell(locAssign1)(R6)).cell.equals(dsg.find(dsg.varToCell(locAssign2)(R7)).cell))
+    // however, they address different internal offsets in those cells
     assert(dsg.find(dsg.varToCell(locAssign1)(R6)).internalOffset == 0)
     assert(dsg.find(dsg.varToCell(locAssign2)(R7)).internalOffset == 1)
+    // R6 points to input R1
     assert(dsg.adjust(dsg.varToCell(locAssign1)(R6))._pointee.isDefined)
     assert(dsg.adjust(dsg.adjust(dsg.varToCell(locAssign1)(R6)).getPointee).equals(dsg.adjust(dsg.formals(R1))))
@@ -347,11 +326,11 @@ class DSATest extends AnyFunSuite, TestUtil {
     var program = prog(
       proc("main",
         block("operations",
-          locAssign1,
-          locAssign2,
+          locAssign1, // R6 = R0 + 4
+          locAssign2, // R7 = R0 + 5
           MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")),
           MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")),
-          locAssign3,
+          locAssign3, // R5 = R7 + 8
           ret
         )
       )
@@ -362,10 +341,13 @@ class DSATest extends AnyFunSuite, TestUtil {
     val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program))
     val dsg: DSG = results.locals.get(program.mainProcedure)
+    // check that R5 points to a separate cell at offset 13
     assert(dsg.find(dsg.varToCell(locAssign3)(R5)).offset == 13)
   }

   test("offsetting from middle of cell to the same cell") {
+    // similar to the above, except instead of creating a new cell the last assignment
+    // points to R7's cell at an internal offset of 8
     val mem = SharedMemory("mem", 10000, 10000)
     val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001"))
     val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002"))
@@ -374,7 +356,6 @@ class DSATest extends AnyFunSuite, TestUtil {
     var program = prog(
       proc("main",
         block("operations",
-          // Assign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")),
           locAssign1,
           locAssign2,
           MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")),
@@ -401,6 +382,7 @@ class DSATest extends AnyFunSuite, TestUtil {
   }

   test("internal offset transfer") {
+    // this test checks that assignments transfer the internal offset of slices.
val mem = SharedMemory("mem", 10000, 10000) val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) @@ -429,7 +411,10 @@ class DSATest extends AnyFunSuite, TestUtil { } // bottom up tests - test("bottom up jumptable2 sub_seven") { + ignore("bottom up jumptable2 callees") { + // this is the same as local graphs + // nothing should be changed + // TODO count point-to relations and ensure no more constraints are added in this phase val results = RunUtils.loadAndTranslate( BASILConfig( loading = ILLoadingConfig( @@ -443,89 +428,26 @@ class DSATest extends AnyFunSuite, TestUtil { outputPrefix = "boogie_out", ) ) - val program = results.ir.program - val dsg = results.analysis.get.bus.get(program.procs("sub_seven")) - assert(dsg.stackMapping.isEmpty) - assert(dsg.find(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.collapsed) - // initial global mappings - assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) - - } - - test("bottom up jumptable2 add_six") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) val program = results.ir.program - val dsg = results.analysis.get.bus.get(program.procs("add_six")) - assert(dsg.stackMapping.isEmpty) - assert(dsg.find(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.collapsed) - - // initial global mappings - assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 
24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) - - } - - test("bottomup jumptable2 add_two") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) + // test that all three calles have the same local graph + val callees = Set("sub_seven", "add_two", "add_six") + callees.foreach( + callee => + val dsg = results.analysis.get.bus.get(program.procs(callee)) + assert(dsg.stackMapping.isEmpty) // stack is not used in either callee + assertJumptable2Globals(dsg) // globals should be the same everywhere unused in callees + // x should point to a collapsed object, in all 3 functions + // all three load value of x + // the analysis doesn't know if x is a pointer or not therefore assumes it is for soundness + // arbitrary pointer is used in arithmetic causing collapse + assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.get.collapsed) ) - val program = results.ir.program - val dsg = results.analysis.get.bus.get(program.procs("add_two")) - assert(dsg.stackMapping.isEmpty) - assert(dsg.find(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.collapsed) - - // initial global mappings - assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 
69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) } + test("bottom up jumptable2 main") { val results = RunUtils.loadAndTranslate( BASILConfig( @@ -556,24 +478,16 @@ class DSATest extends AnyFunSuite, TestUtil { // initial global mappings - assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) + assertJumptable2Globals(dsg) - // bu + // bu x now should be collapsed since it was collapsed in callees assert(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)).getPointee.node.collapsed) } - test("bottom up interproc pointer arithmetic callee") { + ignore("bottom up interproc pointer arithmetic callee") { // same as interproc pointer arithmetic callee's local graph (no changes should have been made) val results = RunUtils.loadAndTranslate( BASILConfig( @@ -623,6 +537,9 @@ class DSATest extends AnyFunSuite, TestUtil { val stack32 = dsg.adjust(dsg.find(dsg.stackMapping(32).cells(0)).getPointee) val stack40 = dsg.adjust(dsg.find(dsg.stackMapping(40).cells(0)).getPointee) + + // same as the local graph with the difference that stack40 points to cell at + // a different of the same node as pointees of stack32 and stack24 assert(stack0.equals(dsg.adjust(dsg.formals(R29)))) assert(stack8.equals(dsg.adjust(dsg.formals(R30)))) assert(stack24.node.get.equals(stack32.node.get)) @@ 
-639,7 +556,8 @@ class DSATest extends AnyFunSuite, TestUtil { // top down tests - test("top down jumptable2 main") { + ignore("top down jumptable2 main") { + // no changes should be made from previous phase val results = RunUtils.loadAndTranslate( BASILConfig( loading = ILLoadingConfig( @@ -684,7 +602,7 @@ class DSATest extends AnyFunSuite, TestUtil { } - test("top down jumptable2 sub_seven") { + ignore("top down jumptable2 callees") { val results = RunUtils.loadAndTranslate( BASILConfig( loading = ILLoadingConfig( @@ -698,93 +616,26 @@ class DSATest extends AnyFunSuite, TestUtil { outputPrefix = "boogie_out", ) ) - val program = results.ir.program - val dsg = results.analysis.get.tds.get(program.procs("sub_seven")) - assert(dsg.stackMapping.isEmpty) - assert(dsg.find(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.collapsed) - // initial global mappings - assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) - - - } - - test("top down jumptable2 add_six") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) val program = results.ir.program - val dsg = results.analysis.get.tds.get(program.procs("add_six")) - assert(dsg.stackMapping.isEmpty) - assert(dsg.find(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.collapsed) - - - // initial global mappings - assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) - 
assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) - - - } - - test("top down jumptable2 add_two") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) + // test that all three callees have the same local graph + val callees = Set("sub_seven", "add_two", "add_six") + callees.foreach( + callee => + val dsg = results.analysis.get.tds.get(program.procs(callee)) + assert(dsg.stackMapping.isEmpty) // stack is not used in either callee + assertJumptable2Globals(dsg) // globals should be the same everywhere unused in callees + // x should point to a collapsed object, in all 3 functions + // all three load value of x + // the analysis doesn't know if x is a pointer or not therefore assumes it is for soundness + // arbitrary pointer is used in arithmetic causing collapse + assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.get.collapsed) ) - val program = results.ir.program - val dsg = results.analysis.get.tds.get(program.procs("add_two")) - assert(dsg.stackMapping.isEmpty) - assert(dsg.find(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.collapsed) - - // initial global mappings - assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 
69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) + } - } test("top down interproc pointer arithmetic callee") { val results = RunUtils.loadAndTranslate( @@ -806,6 +657,7 @@ class DSATest extends AnyFunSuite, TestUtil { val stack8 = dsg.adjust(dsg.find(dsg.stackMapping(8).cells(0)).getPointee) val stack24 = dsg.adjust(dsg.find(dsg.stackMapping(24).cells(0)).getPointee) + // callee should now have different offsets due to formal and actual input parameters being unified assert(stack8.equals(dsg.adjust(dsg.formals(R0)))) assert(stack8.offset == 16) assert(stack24.equals(dsg.adjust(dsg.formals(R0)).node.get.cells(32))) @@ -814,7 +666,7 @@ class DSATest extends AnyFunSuite, TestUtil { // top down phase should be the same as bu phase - test("top down interproc pointer arithmetic main") { + ignore("top down interproc pointer arithmetic main") { val results = RunUtils.loadAndTranslate( BASILConfig( loading = ILLoadingConfig( From 3df153aff2e98062a6198decfe6753a06ed4e997 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Thu, 12 Sep 2024 14:19:21 +1000 Subject: [PATCH 050/104] faster MMM and heap support --- .../InterprocSteensgaardAnalysis.scala | 2 +- src/main/scala/analysis/MemoryModelMap.scala | 81 ++++--------------- .../scala/analysis/MemoryRegionAnalysis.scala | 15 +++- src/main/scala/util/RunUtils.scala | 2 +- 4 files changed, 30 insertions(+), 70 deletions(-) diff --git a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala index 268fba7e8..6a259a58d 100644 --- a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala +++ b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala @@ -263,7 +263,7 @@ class InterprocSteensgaardAnalysis( case directCall: DirectCall => // X = alloc P: [[X]] = ↑[[alloc-i]] if (directCall.target.name == "malloc") { - val alloc = HeapRegion(nextMallocCount(), 0, IRWalk.procedure(cmd)) + val alloc = mmm.getHeap(directCall) unify(IdentifierVariable(RegisterVariableWrapper(mallocVariable, getUse(mallocVariable, cmd, reachingDefs))), PointerRef(AllocVariable(alloc))) } diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index bc08a6280..6545a58fe 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -31,12 +31,13 @@ class MemoryModelMap { private val bufferedSharedStackMap: mutable.Map[String, mutable.Map[Procedure, mutable.TreeMap[RangeKey, StackRegion]]] = mutable.Map() private val heapMap: mutable.Map[RangeKey, HeapRegion] = mutable.TreeMap() private val dataMap: mutable.Map[RangeKey, DataRegion] = mutable.TreeMap() + private val heapCalls: mutable.Map[DirectCall, HeapRegion] = mutable.Map() private val uf = new UnionFind() /** Add a range and object to the mapping * - * @param offset the offset of the range + * @param offset the offset of the range, if a heap region is given, the offsets controls the shift of 
regions from the start * @param region the region to add * @param shared if the region is shared. When true, the region is added to the sharedStackMap * otherwise to the stackMap @@ -133,92 +134,33 @@ class MemoryModelMap { tableAddress } - /** - * Get the mapping of procedures to memory regions - * Gets the regions from each CFGPosition and maps them to the corresponding procedure based on the - * parent field of the StackRegion - * No shared regions here - * @param memoryRegions - * @return - */ - def getProceduresToRegionsMapping(memoryRegions: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]]): mutable.Map[Procedure, mutable.Set[MemoryRegion]] = { - val procedureToRegions = mutable.Map[Procedure, mutable.Set[MemoryRegion]]() - for ((position, regions) <- memoryRegions) { - regions match { - case Lift(node) => - for (region <- node) { - region match { - case stackRegion: StackRegion => - val procedure = stackRegion.parent - if (!procedureToRegions.contains(procedure)) { - procedureToRegions(procedure) = mutable.Set() - } - procedureToRegions(procedure) += stackRegion - case heapRegion: HeapRegion => - val procedure = heapRegion.parent - if (!procedureToRegions.contains(procedure)) { - procedureToRegions(procedure) = mutable.Set() - } - procedureToRegions(procedure) += heapRegion - case _ => - } - } - case LiftedBottom => - } - } - // make sure all procedures have results otherwise make them empty - memoryRegions.keys.collect {case procedure: Procedure => procedure}.foreach(procedure => { - if (!procedureToRegions.contains(procedure)) { - procedureToRegions(procedure) = mutable.Set() - } - }) - procedureToRegions - } - - def convertMemoryRegions(memoryRegions: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]], externalFunctions: Map[BigInt, String], globalOffsets: Map[BigInt, BigInt], globalAddresses: Map[BigInt, String], globalSizes: Map[String, Int], procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]]): Unit = { + def convertMemoryRegions(stackRegionsPerProcedure: mutable.Map[Procedure, mutable.Set[StackRegion]], heapRegions: mutable.Map[DirectCall, HeapRegion], externalFunctions: Map[BigInt, String], globalOffsets: Map[BigInt, BigInt], globalAddresses: Map[BigInt, String], globalSizes: Map[String, Int], procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]]): Unit = { // map externalFunctions name, value to DataRegion(name, value) and then sort by value val reversedExternalFunctionRgns = (externalFunctions ++ globalAddresses).map((offset, name) => resolveInverseGlobalOffset(name, offset, globalOffsets) -> name) val filteredGlobalOffsets = globalAddresses.filterNot((offset, name) => reversedExternalFunctionRgns.contains(offset)) val externalFunctionRgns = (reversedExternalFunctionRgns ++ filteredGlobalOffsets).map((offset, name) => DataRegion(name, offset, (globalSizes.getOrElse(name, 1).toDouble/8).ceil.toInt)) - // we should collect all data regions otherwise the ordering might be wrong - var dataRgns: Set[DataRegion] = Set.empty - // get all function exit node - val regionsPerProcedure = getProceduresToRegionsMapping(memoryRegions) - val exitNodes = regionsPerProcedure.keys.collect { case procedure: Procedure => procedure } - - exitNodes.foreach(exitNode => + stackRegionsPerProcedure.keys.foreach(exitNode => if (procedureToSharedRegions.contains(exitNode)) { val sharedRegions = procedureToSharedRegions(exitNode) sharedStacks(exitNode.name) = sharedRegions.collect { case r: StackRegion => r }.toList.sortBy(_.start) } // for each function 
exit node we get the memory region and add it to the mapping - val stackRgns = regionsPerProcedure(exitNode).collect { case r: StackRegion => r }.toList.sortBy(_.start) - dataRgns = dataRgns ++ regionsPerProcedure(exitNode).collect { case r: DataRegion => r } - + val stackRgns = stackRegionsPerProcedure(exitNode).toList.sortBy(_.start) localStacks(exitNode.name) = stackRgns ) // add externalFunctionRgn to dataRgns and sort by value - val allDataRgns = (dataRgns ++ externalFunctionRgns).toList.sortBy(_.start) + val allDataRgns = externalFunctionRgns.toList.sortBy(_.start) for (dataRgn <- allDataRgns) { add(dataRgn.start, dataRgn) } + heapCalls ++= heapRegions // add heap regions val rangeStart = 0 - for ((position, regions) <- memoryRegions) { - regions match { - case Lift(node) => - for (region <- node) { - region match { - case heapRegion: HeapRegion => - add(BigInt(0), heapRegion) - case _ => - } - } - case LiftedBottom => - } + for (heapRegion <- heapRegions.values) { + add(rangeStart, heapRegion) } } // TODO: push and pop could be optimised by caching the results @@ -475,6 +417,11 @@ class MemoryModelMap { private def returnRegion(region: HeapRegion): HeapRegion = { uf.find(region.asInstanceOf[MemoryRegion]).asInstanceOf[HeapRegion] } + + def getHeap(directCall: DirectCall): HeapRegion = { + require(directCall.target.name == "malloc", "Should be a malloc call") + heapCalls(directCall) + } } trait MemoryRegion { diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index 8fcc84d86..43df9ee9e 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -47,6 +47,7 @@ trait MemoryRegionAnalysis(val program: Program, region = stackPool(expr) } else { val newRegion = StackRegion(nextStackCount(), expr, stackBase) + addReturnStack(stackBase, newRegion) stackPool += (expr -> newRegion) region = newRegion } @@ -94,6 +95,16 @@ trait MemoryRegionAnalysis(val program: Program, // TODO: this could be used instead of regionAccesses in other analyses to reduce the Expr to region conversion private val registerToRegions: mutable.Map[RegisterVariableWrapper, mutable.Set[MemoryRegion]] = mutable.Map() val procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]] = mutable.Map() + var procedureToStackRegions: mutable.Map[Procedure, mutable.Set[StackRegion]] = mutable.Map() + var procedureToHeapRegions: mutable.Map[DirectCall, HeapRegion] = mutable.Map() + + def addReturnStack(procedure: Procedure, returnRegion: StackRegion): Unit = { + procedureToStackRegions.getOrElseUpdate(procedure, mutable.Set.empty).add(returnRegion) + } + + def addReturnHeap(directCall: DirectCall, returnRegion: HeapRegion): Unit = { + procedureToHeapRegions.put(directCall, returnRegion) + } def reducibleToRegion(binExpr: BinaryExpr, n: Command, subAccess: BigInt): Set[MemoryRegion] = { var reducedRegions = Set.empty[MemoryRegion] @@ -213,7 +224,9 @@ trait MemoryRegionAnalysis(val program: Program, evaluateExpression(mallocVariable, constantProp(n)) match { case Some(b: BitVecLiteral) => val negB = if isNegative(b) then b.value - BigInt(2).pow(b.size) else b.value - regionLattice.lub(s, Set(HeapRegion(nextMallocCount(), negB, IRWalk.procedure(n)))) + val newHeapRegion = HeapRegion(nextMallocCount(), negB, IRWalk.procedure(n)) + addReturnHeap(directCall, newHeapRegion) + regionLattice.lub(s, Set(newHeapRegion)) case None => s } } else { diff --git a/src/main/scala/util/RunUtils.scala 
b/src/main/scala/util/RunUtils.scala index 55067b55e..4366730d3 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -680,7 +680,7 @@ object StaticAnalysis { Logger.info("[!] Running MMM") val mmm = MemoryModelMap() - mmm.convertMemoryRegions(mraResult, mergedSubroutines, globalOffsets, globalAddresses, globalSizes, mraSolver.procedureToSharedRegions) + mmm.convertMemoryRegions(mraSolver.procedureToStackRegions, mraSolver.procedureToHeapRegions, mergedSubroutines, globalOffsets, globalAddresses, globalSizes, mraSolver.procedureToSharedRegions) mmm.logRegions() Logger.info("[!] Injecting regions") From 40074fdbcf4afff06104a52fbac138c21fa19299 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Thu, 12 Sep 2024 16:40:21 +1000 Subject: [PATCH 051/104] Heap region tracking using points-to --- .../InterprocSteensgaardAnalysis.scala | 165 +++++++++--------- .../scala/analysis/MemoryRegionAnalysis.scala | 3 - .../ReachingDefinitionsAnalysis.scala | 34 +++- src/main/scala/analysis/UtilMethods.scala | 11 +- src/main/scala/analysis/VSA.scala | 3 - 5 files changed, 117 insertions(+), 99 deletions(-) diff --git a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala index 6a259a58d..c8bcdf2ca 100644 --- a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala +++ b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala @@ -57,6 +57,7 @@ class InterprocSteensgaardAnalysis( val stackMap: mutable.Map[Expr, StackRegion] = mutable.Map() private val memoryRegionContents: mutable.Map[MemoryRegion, mutable.Set[BitVecLiteral | MemoryRegion]] = mutable.Map() + private val visited: mutable.Set[CFGPosition] = mutable.Set() def getMemoryRegionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]] = memoryRegionContents.map((k, v) => k -> v.toSet).toMap @@ -156,8 +157,6 @@ class InterprocSteensgaardAnalysis( val nextOffset = bitVectorOpToBigIntOp(binExpr.op, stackRegion.start, b.value) reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, BitVecLiteral(nextOffset, 64)), n) case dataRegion: DataRegion => - Logger.debug(s"Hey, I'm a data region: $dataRegion") - Logger.debug(s"Hey, I'm a offset: $b") val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start), b) evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => reducedRegions ++= exprToRegion(b2, n) @@ -248,7 +247,7 @@ class InterprocSteensgaardAnalysis( def analyze(): Unit = // generate the constraints by traversing the AST and solve them on-the-fly program.procedures.foreach(p => { - p.blocks.foreach(b => b.statements.foreach(visit(_, ()))) + visit(p, ()) }) /** Generates the constraints for the given sub-AST. 
@@ -257,84 +256,92 @@ class InterprocSteensgaardAnalysis( * @param arg * unused for this visitor */ - def visit(cmd: Command, arg: Unit): Unit = { - - cmd match { - case directCall: DirectCall => - // X = alloc P: [[X]] = ↑[[alloc-i]] - if (directCall.target.name == "malloc") { - val alloc = mmm.getHeap(directCall) - unify(IdentifierVariable(RegisterVariableWrapper(mallocVariable, getUse(mallocVariable, cmd, reachingDefs))), PointerRef(AllocVariable(alloc))) - } - - case assign: Assign => - assign.rhs match { - case binOp: BinaryExpr => - // X1 = &X2: [[X1]] = ↑[[X2]] - exprToRegion(binOp, cmd).foreach( - x => unify(IdentifierVariable(RegisterVariableWrapper(assign.lhs, getDefinition(assign.lhs, cmd, reachingDefs))), PointerRef(AllocVariable(x))) - ) - // TODO: should lookout for global base + offset case as well - case _ => - unwrapExpr(assign.rhs).foreach { - case memoryLoad: MemoryLoad => - // X1 = *X2: [[X2]] = ↑a ^ [[X1]] = a where a is a fresh term variable - val X1 = assign.lhs - val X2_star = exprToRegion(memoryLoad.index, cmd) - val alpha = FreshVariable() - X2_star.foreach( - x => unify(ExpressionVariable(x), PointerRef(alpha)) - ) - unify(alpha, IdentifierVariable(RegisterVariableWrapper(X1, getDefinition(X1, cmd, reachingDefs)))) - - Logger.debug("Memory load: " + memoryLoad) - Logger.debug("Index: " + memoryLoad.index) - Logger.debug("X2_star: " + X2_star) - Logger.debug("X1: " + X1) - Logger.debug("Assign: " + assign) - - // TODO: This might not be correct for globals - // X1 = &X: [[X1]] = ↑[[X2]] (but for globals) - val $X2 = exprToRegion(memoryLoad.index, cmd) - $X2.foreach( - x => unify(IdentifierVariable(RegisterVariableWrapper(assign.lhs, getDefinition(assign.lhs, cmd, reachingDefs))), PointerRef(AllocVariable(x))) - ) - case variable: Variable => - // X1 = X2: [[X1]] = [[X2]] - val X1 = assign.lhs - val X2 = variable - unify(IdentifierVariable(RegisterVariableWrapper(X1, getDefinition(X1, cmd, reachingDefs))), IdentifierVariable(RegisterVariableWrapper(X2, getUse(X2, cmd, reachingDefs)))) - case _ => // do nothing - } - } - case memoryAssign: MemoryAssign => - // *X1 = X2: [[X1]] = ↑a ^ [[X2]] = a where a is a fresh term variable - val X1_star = exprToRegion(memoryAssign.index, cmd) - val X2 = evaluateExpressionWithSSA(memoryAssign.value, constantProp(cmd), cmd, reachingDefs) - // TODO: This is risky as it tries to coerce every value to a region (needed for functionpointer example) - val possibleRegions = exprToRegion(memoryAssign.value, cmd) - - Logger.debug("I am at stmt: " + cmd.label) - Logger.debug("Memory assign: " + memoryAssign) - Logger.debug("X2 is: " + X2) - Logger.debug("PossibleRegions instead of X2 " + possibleRegions) - Logger.debug("Evaluated: " + memoryAssign.value) - Logger.debug("Region " + X1_star) - Logger.debug("Index " + memoryAssign.index) - val alpha = FreshVariable() - X1_star.foreach(x => - unify(ExpressionVariable(x), PointerRef(alpha)) - if (!memoryRegionContents.contains(x)) { - memoryRegionContents.addOne(x -> mutable.Set()) + def visit(node: CFGPosition, arg: Unit): Unit = { + if (visited.contains(node)) { + return + } + node match { + case cmd: Command => + cmd match { + case directCall: DirectCall => + // X = alloc P: [[X]] = ↑[[alloc-i]] + if (directCall.target.name == "malloc") { + val alloc = mmm.getHeap(directCall) + val defs = getDefinition(mallocVariable, cmd, reachingDefs, true) + unify(IdentifierVariable(RegisterVariableWrapper(mallocVariable, defs)), PointerRef(AllocVariable(alloc))) + } + case assign: Assign => + assign.rhs 
match { + case binOp: BinaryExpr => + // X1 = &X2: [[X1]] = ↑[[X2]] + exprToRegion(binOp, cmd).foreach( + x => unify(IdentifierVariable(RegisterVariableWrapper(assign.lhs, getDefinition(assign.lhs, cmd, reachingDefs))), PointerRef(AllocVariable(x))) + ) + // TODO: should lookout for global base + offset case as well + case _ => + unwrapExpr(assign.rhs).foreach { + case memoryLoad: MemoryLoad => + // X1 = *X2: [[X2]] = ↑a ^ [[X1]] = a where a is a fresh term variable + val X1 = assign.lhs + val X2_star = exprToRegion(memoryLoad.index, cmd) + val alpha = FreshVariable() + X2_star.foreach( + x => unify(ExpressionVariable(x), PointerRef(alpha)) + ) + unify(alpha, IdentifierVariable(RegisterVariableWrapper(X1, getDefinition(X1, cmd, reachingDefs)))) + + Logger.debug("Memory load: " + memoryLoad) + Logger.debug("Index: " + memoryLoad.index) + Logger.debug("X2_star: " + X2_star) + Logger.debug("X1: " + X1) + Logger.debug("Assign: " + assign) + + // TODO: This might not be correct for globals + // X1 = &X: [[X1]] = ↑[[X2]] (but for globals) + val $X2 = exprToRegion(memoryLoad.index, cmd) + $X2.foreach( + x => unify(IdentifierVariable(RegisterVariableWrapper(assign.lhs, getDefinition(assign.lhs, cmd, reachingDefs))), PointerRef(AllocVariable(x))) + ) + case variable: Variable => + // X1 = X2: [[X1]] = [[X2]] + val X1 = assign.lhs + val X2 = variable + unify(IdentifierVariable(RegisterVariableWrapper(X1, getDefinition(X1, cmd, reachingDefs))), IdentifierVariable(RegisterVariableWrapper(X2, getUse(X2, cmd, reachingDefs)))) + case _ => // do nothing + } } - memoryRegionContents(x).addAll(X2) - memoryRegionContents(x).addAll(possibleRegions.filter(r => r != x)) - ) - X2.foreach(x => unify(alpha, ExpressionVariable(x))) - possibleRegions.foreach(x => unify(alpha, ExpressionVariable(x))) - case _ => // do nothing TODO: Maybe LocalVar too? - } + case memoryAssign: MemoryAssign => + // *X1 = X2: [[X1]] = ↑a ^ [[X2]] = a where a is a fresh term variable + val X1_star = exprToRegion(memoryAssign.index, cmd) + val X2 = evaluateExpressionWithSSA(memoryAssign.value, constantProp(cmd), cmd, reachingDefs) + // TODO: This is risky as it tries to coerce every value to a region (needed for functionpointer example) + val possibleRegions = exprToRegion(memoryAssign.value, cmd) + + Logger.debug("I am at stmt: " + cmd.label) + Logger.debug("Memory assign: " + memoryAssign) + Logger.debug("X2 is: " + X2) + Logger.debug("PossibleRegions instead of X2 " + possibleRegions) + Logger.debug("Evaluated: " + memoryAssign.value) + Logger.debug("Region " + X1_star) + Logger.debug("Index " + memoryAssign.index) + val alpha = FreshVariable() + X1_star.foreach(x => + unify(ExpressionVariable(x), PointerRef(alpha)) + if (!memoryRegionContents.contains(x)) { + memoryRegionContents.addOne(x -> mutable.Set()) + } + memoryRegionContents(x).addAll(X2) + memoryRegionContents(x).addAll(possibleRegions.filter(r => r != x)) + ) + X2.foreach(x => unify(alpha, ExpressionVariable(x))) + possibleRegions.foreach(x => unify(alpha, ExpressionVariable(x))) + case _ => // do nothing TODO: Maybe LocalVar too? 
+ } + case _ => } + visited.add(node) + InterProcIRCursor.succ(node).foreach(n => visit(n, ())) + } private def unify(t1: Term[StTerm], t2: Term[StTerm]): Unit = { //Logger.info(s"univfying constraint $t1 = $t2\n") diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index 43df9ee9e..038d27697 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -152,9 +152,6 @@ trait MemoryRegionAnalysis(val program: Program, } def eval(exp: Expr, env: Set[MemoryRegion], n: Command, subAccess: BigInt): Set[MemoryRegion] = { - Logger.debug(s"evaluating $exp") - Logger.debug(s"env: $env") - Logger.debug(s"n: $n") exp match { case binOp: BinaryExpr => if (spList.contains(binOp.arg1)) { diff --git a/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala b/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala index 47592f082..3bb8c01dd 100644 --- a/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala +++ b/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala @@ -21,23 +21,29 @@ case class ReachingDefinitionsAnalysis(program: Program) { ) val domain: Set[CFGPosition] = Set.empty ++ program + var uniqueDefCounter: Int = 0 + + def nextDef(): Int = { + uniqueDefCounter += 1 + uniqueDefCounter + } /* * Good enough as stmts are unique */ private def generateUniqueDefinition( - variable: Variable - ): Assign = { - Assign(variable, BitVecLiteral(0, 0)) + variable: Variable + ): Assign = { + Assign(variable, Register("Unique_" + nextDef(), 0)) } def transfer(n: CFGPosition, s: (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]])): (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]]) = localTransfer(n, s) def localTransfer( - n: CFGPosition, - s: (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]]) - ): (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]]) = n match { + n: CFGPosition, + s: (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]]) + ): (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]]) = n match { case cmd: Command => eval(cmd, s) case _ => s @@ -51,7 +57,7 @@ case class ReachingDefinitionsAnalysis(program: Program) { } def eval(cmd: Command, s: (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]]) - ): (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]]) = cmd match { + ): (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]]) = cmd match { case assign: Assign => // do the rhs first (should reset the values for this node to the empty set) // for each variable in the rhs, find the definitions from the lattice lhs and add them to the lattice rhs @@ -71,6 +77,15 @@ case class ReachingDefinitionsAnalysis(program: Program) { transformUses(assume.body.variables, s) case indirectCall: IndirectCall => transformUses(indirectCall.target.variables, s) + case directCall: DirectCall if directCall.target.name == "malloc" => + // assume R0 has been assigned, generate a fake definition + val mallocVar = Register("R0", 64) + val mallocDef = generateUniqueDefinition(mallocVar) + val mallocUseDefs: Map[Variable, Set[Definition]] = Set(mallocVar).foldLeft(Map.empty[Variable, Set[Definition]]) { + case (acc, v) => + acc + (v -> s._1(v)) + } + (s._1 + (Register("R0", 64) -> Set(mallocDef)), mallocUseDefs) case _ => s } } @@ -79,3 +94,8 @@ class ReachingDefinitionsAnalysisSolver(program: Program) extends ReachingDefinitionsAnalysis(program) with 
SimpleWorklistFixpointSolver[CFGPosition, (Map[Variable, Set[ReachingDefinitionsAnalysis#Definition]], Map[Variable, Set[ReachingDefinitionsAnalysis#Definition]]), ReachingDefinitionsAnalysis#TupleElement] with IRIntraproceduralForwardDependencies + +class InterprocReachingDefinitionsAnalysisSolver(program: Program) + extends ReachingDefinitionsAnalysis(program) + with SimpleWorklistFixpointSolver[CFGPosition, (Map[Variable, Set[ReachingDefinitionsAnalysis#Definition]], Map[Variable, Set[ReachingDefinitionsAnalysis#Definition]]), ReachingDefinitionsAnalysis#TupleElement] + with IRInterproceduralForwardDependencies \ No newline at end of file diff --git a/src/main/scala/analysis/UtilMethods.scala b/src/main/scala/analysis/UtilMethods.scala index a91595cd5..762066e7b 100644 --- a/src/main/scala/analysis/UtilMethods.scala +++ b/src/main/scala/analysis/UtilMethods.scala @@ -12,7 +12,6 @@ import util.Logger * The evaluated expression (e.g. 0x69632) */ def evaluateExpression(exp: Expr, constantPropResult: Map[Variable, FlatElement[BitVecLiteral]]): Option[BitVecLiteral] = { - Logger.debug(s"evaluateExpression: $exp") exp match { case binOp: BinaryExpr => val lhs = evaluateExpression(binOp.arg1, constantPropResult) @@ -73,8 +72,6 @@ def evaluateExpression(exp: Expr, constantPropResult: Map[Variable, FlatElement[ } def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapperEqualSets, Set[BitVecLiteral]], n: CFGPosition, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]): Set[BitVecLiteral] = { - Logger.debug(s"evaluateExpression: $exp") - def apply(op: (BitVecLiteral, BitVecLiteral) => BitVecLiteral, a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = { val res = for { x <- a @@ -143,14 +140,14 @@ def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapper } } -def getDefinition(variable: Variable, node: CFGPosition, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]): Set[Assign] = { +def getDefinition(variable: Variable, node: CFGPosition, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], noFilter: Boolean = false): Set[Assign] = { val (in, _) = reachingDefs(node) - in.getOrElse(variable, Set()) + if noFilter then in.getOrElse(variable, Set()) else in.getOrElse(variable, Set()).filterNot(_.rhs.variables.forall(_.name.contains("Unique"))) } -def getUse(variable: Variable, node: CFGPosition, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]): Set[Assign] = { +def getUse(variable: Variable, node: CFGPosition, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], noFiler: Boolean = false): Set[Assign] = { val (_, out) = reachingDefs(node) - out.getOrElse(variable, Set()) + if noFiler then out.getOrElse(variable, Set()) else out.getOrElse(variable, Set()).filterNot(_.rhs.variables.forall(_.name.contains("Unique"))) } def unwrapExpr(expr: Expr): Set[Expr] = { diff --git a/src/main/scala/analysis/VSA.scala b/src/main/scala/analysis/VSA.scala index 03ef8ff60..9bdae0158 100644 --- a/src/main/scala/analysis/VSA.scala +++ b/src/main/scala/analysis/VSA.scala @@ -101,9 +101,6 @@ trait ValueSetAnalysis(program: Program, /** Default implementation of eval. 
*/ def eval(cmd: Command, s: Map[Variable | MemoryRegion, Set[Value]], n: CFGPosition): Map[Variable | MemoryRegion, Set[Value]] = { - Logger.debug(s"eval: $cmd") - Logger.debug(s"state: $s") - Logger.debug(s"node: $n") var m = s cmd match case localAssign: Assign => From 52e0bb74ad9f6a8b0cde26d55deb15a1708ab400 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Tue, 17 Sep 2024 00:04:33 +1000 Subject: [PATCH 052/104] Merging regions at start --- src/main/scala/analysis/MemoryModelMap.scala | 31 ++++++++++++++++++- .../scala/analysis/MemoryRegionAnalysis.scala | 18 +++++++++++ src/main/scala/analysis/RegionInjector.scala | 19 ++++++------ src/main/scala/util/RunUtils.scala | 10 +++--- 4 files changed, 62 insertions(+), 16 deletions(-) diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index 6545a58fe..9242a141f 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -134,7 +134,7 @@ class MemoryModelMap { tableAddress } - def convertMemoryRegions(stackRegionsPerProcedure: mutable.Map[Procedure, mutable.Set[StackRegion]], heapRegions: mutable.Map[DirectCall, HeapRegion], externalFunctions: Map[BigInt, String], globalOffsets: Map[BigInt, BigInt], globalAddresses: Map[BigInt, String], globalSizes: Map[String, Int], procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]]): Unit = { + def convertMemoryRegions(stackRegionsPerProcedure: mutable.Map[Procedure, mutable.Set[StackRegion]], heapRegions: mutable.Map[DirectCall, HeapRegion], mergeRegions: mutable.Set[Set[MemoryRegion]], externalFunctions: Map[BigInt, String], globalOffsets: Map[BigInt, BigInt], globalAddresses: Map[BigInt, String], globalSizes: Map[String, Int], procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]]): Unit = { // map externalFunctions name, value to DataRegion(name, value) and then sort by value val reversedExternalFunctionRgns = (externalFunctions ++ globalAddresses).map((offset, name) => resolveInverseGlobalOffset(name, offset, globalOffsets) -> name) val filteredGlobalOffsets = globalAddresses.filterNot((offset, name) => reversedExternalFunctionRgns.contains(offset)) @@ -162,6 +162,11 @@ class MemoryModelMap { for (heapRegion <- heapRegions.values) { add(rangeStart, heapRegion) } + + // merge regions + for (regions <- mergeRegions) { + uf.bulkUnion(regions) + } } // TODO: push and pop could be optimised by caching the results def pushContext(funName: String): Unit = { @@ -385,6 +390,20 @@ class MemoryModelMap { logRegion(range, region, true) } } + Logger.debug("Stack Root:") + for name <- localStacks.keys do + popContext() + pushContext(name) + Logger.debug(s" Function: $name") + var parentCount = 0 + // get root regions + for ((range, region) <- stackMap) { + val root = uf.find(region) + if root == region then + logRegion(range, root) + parentCount += 1 + } + if parentCount == 0 then Logger.debug(" No root regions") else Logger.debug(s" Parents: $parentCount/${stackMap.size}") Logger.debug("Heap:") for ((range, region) <- heapMap) { logRegion(range, region) @@ -483,6 +502,16 @@ class UnionFind { } } + def bulkUnion(regions: Set[MemoryRegion]): Unit = { + val roots = regions.map(find) + val root = roots.head + for (region <- roots) { + if (region != root) { + union(root, region) + } + } + } + // Check if two regions are in the same set def connected(region1: MemoryRegion, region2: MemoryRegion): Boolean = { find(region1) == find(region2) diff --git 
a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index 038d27697..655ee4dc9 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -97,6 +97,12 @@ trait MemoryRegionAnalysis(val program: Program, val procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]] = mutable.Map() var procedureToStackRegions: mutable.Map[Procedure, mutable.Set[StackRegion]] = mutable.Map() var procedureToHeapRegions: mutable.Map[DirectCall, HeapRegion] = mutable.Map() + var memLoadToRegion: mutable.Map[MemoryLoad, MemoryRegion] = mutable.Map() + var mergeRegions: mutable.Set[Set[MemoryRegion]] = mutable.Set() + + def addMergableRegions(regions: Set[MemoryRegion]): Unit = { + mergeRegions.add(regions) + } def addReturnStack(procedure: Procedure, returnRegion: StackRegion): Unit = { procedureToStackRegions.getOrElseUpdate(procedure, mutable.Set.empty).add(returnRegion) @@ -106,6 +112,10 @@ trait MemoryRegionAnalysis(val program: Program, procedureToHeapRegions.put(directCall, returnRegion) } + def addMemLoadRegion(memoryLoad: MemoryLoad, memoryRegion: MemoryRegion): Unit = { + memLoadToRegion.put(memoryLoad, memoryRegion) + } + def reducibleToRegion(binExpr: BinaryExpr, n: Command, subAccess: BigInt): Set[MemoryRegion] = { var reducedRegions = Set.empty[MemoryRegion] binExpr.arg1 match { @@ -231,6 +241,10 @@ trait MemoryRegionAnalysis(val program: Program, } case memAssign: MemoryAssign => val result = eval(memAssign.index, s, cmd, memAssign.size) + if (result.size > 1) { + //throw new Exception(s"Memory load resulted in multiple regions ${result} for mem load $memoryLoad") + addMergableRegions(result) + } regionLattice.lub(s, result) case assign: Assign => stackDetection(assign) @@ -238,6 +252,10 @@ trait MemoryRegionAnalysis(val program: Program, unwrapExpr(assign.rhs).foreach { case memoryLoad: MemoryLoad => val result = eval(memoryLoad.index, s, cmd, memoryLoad.size) + if (result.size > 1) { + //throw new Exception(s"Memory load resulted in multiple regions ${result} for mem load $memoryLoad") + addMergableRegions(result) + } m = regionLattice.lub(m, result) case _ => m } diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala index c1beca748..4779045b0 100644 --- a/src/main/scala/analysis/RegionInjector.scala +++ b/src/main/scala/analysis/RegionInjector.scala @@ -256,18 +256,17 @@ class RegionInjector(domain: mutable.Set[CFGPosition], case _ => } } else if (regions.size > 1) { - Logger.warn(s"Mem CMD is: ${cmd}") - Logger.warn(s"Multiple regions found for mem: ${regions}") - mmm.mergeRegions(regions) match { - case stackRegion: StackRegion => - return StackMemory(stackRegion.regionIdentifier, mem.addressSize, mem.valueSize) - case dataRegion: DataRegion => - return SharedMemory(dataRegion.regionIdentifier, mem.addressSize, mem.valueSize) - case _ => - } + throw RuntimeException("Multiple regions found for memory") +// mmm.mergeRegions(regions) match { +// case stackRegion: StackRegion => +// return StackMemory(stackRegion.regionIdentifier, mem.addressSize, mem.valueSize) +// case dataRegion: DataRegion => +// return SharedMemory(dataRegion.regionIdentifier, mem.addressSize, mem.valueSize) +// case _ => +// } } else { Logger.warn(s"Mem CMD is: ${cmd}") - Logger.warn(s"No region found for mem") + Logger.warn(s"No region found for expr ${expr} regions size is ${regions.size}") } mem } diff --git 
a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 4366730d3..ae63a0e88 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -680,13 +680,9 @@ object StaticAnalysis { Logger.info("[!] Running MMM") val mmm = MemoryModelMap() - mmm.convertMemoryRegions(mraSolver.procedureToStackRegions, mraSolver.procedureToHeapRegions, mergedSubroutines, globalOffsets, globalAddresses, globalSizes, mraSolver.procedureToSharedRegions) + mmm.convertMemoryRegions(mraSolver.procedureToStackRegions, mraSolver.procedureToHeapRegions, mraSolver.mergeRegions, mergedSubroutines, globalOffsets, globalAddresses, globalSizes, mraSolver.procedureToSharedRegions) mmm.logRegions() - Logger.info("[!] Injecting regions") - val regionInjector = RegionInjector(domain, IRProgram, constPropResultWithSSA, mmm, reachingDefinitionsAnalysisResults, globalOffsets) - regionInjector.nodeVisitor() - Logger.info("[!] Running Steensgaard") val steensgaardSolver = InterprocSteensgaardAnalysis(IRProgram, constPropResultWithSSA, regionAccessesAnalysisResults, mmm, reachingDefinitionsAnalysisResults, globalOffsets) steensgaardSolver.analyze() @@ -694,6 +690,10 @@ object StaticAnalysis { val memoryRegionContents = steensgaardSolver.getMemoryRegionContents mmm.logRegions(memoryRegionContents) + Logger.info("[!] Injecting regions") + val regionInjector = RegionInjector(domain, IRProgram, constPropResultWithSSA, mmm, reachingDefinitionsAnalysisResults, globalOffsets) + regionInjector.nodeVisitor() + Logger.info("[!] Running VSA") val vsaSolver = ValueSetAnalysisSolver(IRProgram, globalAddresses, externalAddresses, globalOffsets, subroutines, mmm, constPropResult) From fb2aa27d183c66e12d0f1e620d015d658099b168 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Tue, 17 Sep 2024 01:19:34 +1000 Subject: [PATCH 053/104] Update UtilMethods.scala --- src/main/scala/analysis/UtilMethods.scala | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/main/scala/analysis/UtilMethods.scala b/src/main/scala/analysis/UtilMethods.scala index 762066e7b..23cddbf52 100644 --- a/src/main/scala/analysis/UtilMethods.scala +++ b/src/main/scala/analysis/UtilMethods.scala @@ -136,6 +136,9 @@ def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapper Logger.debug("getUse: " + getUse(variable, n, reachingDefs)) constantPropResult(RegisterWrapperEqualSets(variable, getUse(variable, n, reachingDefs))) case b: BitVecLiteral => Set(b) + case Repeat(repeats, body) => evaluateExpressionWithSSA(body, constantPropResult, n, reachingDefs) + case MemoryLoad(mem, index, endian, size) => Set.empty + case UninterpretedFunction(name, params, returnType) => Set.empty case _ => throw RuntimeException("ERROR: CASE NOT HANDLED: " + exp + "\n") } } From a791d5fad634f82e398efeac1968b6afa6472ff3 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Tue, 17 Sep 2024 16:12:47 +1000 Subject: [PATCH 054/104] Using simple constantProp --- .../InterprocSteensgaardAnalysis.scala | 73 ++++++++++--------- src/main/scala/analysis/RegionInjector.scala | 58 ++++++++------- src/main/scala/analysis/UtilMethods.scala | 4 +- src/main/scala/util/RunUtils.scala | 8 +- 4 files changed, 76 insertions(+), 67 deletions(-) diff --git a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala index c8bcdf2ca..0b0713ec2 100644 --- a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala +++ b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala @@ -38,7 +38,7 @@ 
case class RegisterWrapperEqualSets(variable: Variable, assigns: Set[Assign]) { */ class InterprocSteensgaardAnalysis( program: Program, - constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], + constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], regionAccesses: Map[CfgNode, Map[RegisterVariableWrapper, FlatElement[Expr]]], mmm: MemoryModelMap, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], @@ -109,8 +109,9 @@ class InterprocSteensgaardAnalysis( var reducedRegions = Set.empty[MemoryRegion] binExpr.arg1 match { case variable: Variable => - evaluateExpressionWithSSA(binExpr, constantProp(n), n, reachingDefs).foreach { b => - val region = mmm.findDataObject(b.value) + val b = evaluateExpression(binExpr, constantProp(n)) + if (b.isDefined) { + val region = mmm.findDataObject(b.get.value) reducedRegions = reducedRegions ++ region } if (reducedRegions.nonEmpty) { @@ -129,10 +130,10 @@ class InterprocSteensgaardAnalysis( if (memoryRegionContents.contains(f)) { memoryRegionContents(f).foreach { case b: BitVecLiteral => -// val region = mmm.findDataObject(b.value) -// if (region.isDefined) { -// toReturn.addOne(region.get) -// } + // val region = mmm.findDataObject(b.value) + // if (region.isDefined) { + // toReturn.addOne(region.get) + // } case r: MemoryRegion => toReturn.addOne(r) toReturn.remove(f) @@ -147,21 +148,24 @@ class InterprocSteensgaardAnalysis( Logger.debug(ctx) exprToRegion(i.rhs, i) } - val results = evaluateExpressionWithSSA(binExpr.arg2, constantProp(n), n, reachingDefs) - for { - b <- results - r <- regions - } { - r match { - case stackRegion: StackRegion => - val nextOffset = bitVectorOpToBigIntOp(binExpr.op, stackRegion.start, b.value) - reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, BitVecLiteral(nextOffset, 64)), n) - case dataRegion: DataRegion => - val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start), b) - evaluateExpressionWithSSA(nextOffset, constantProp(n), n, reachingDefs).foreach { b2 => - reducedRegions ++= exprToRegion(b2, n) - } - case _ => + val result = evaluateExpression(binExpr.arg2, constantProp(n)) + if (result.isDefined) { + val b = result.get + for { + r <- regions + } { + r match { + case stackRegion: StackRegion => + val nextOffset = bitVectorOpToBigIntOp(binExpr.op, stackRegion.start, b.value) + reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, BitVecLiteral(nextOffset, 64)), n) + case dataRegion: DataRegion => + val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start), b) + val b2 = evaluateExpression(nextOffset, constantProp(n)) + if (b2.isDefined) { + reducedRegions ++= exprToRegion(b2.get, n) + } + case _ => + } } } } @@ -186,15 +190,16 @@ class InterprocSteensgaardAnalysis( mmm.pushContext(IRWalk.procedure(n).name) expr match { // TODO: Stack detection here should be done in a better way or just merged with data case binOp: BinaryExpr if binOp.arg1 == stackPointer => - evaluateExpressionWithSSA(binOp.arg2, constantProp(n), n, reachingDefs).foreach { b => + val b = evaluateExpression(binOp.arg2, constantProp(n)) + if (b.isDefined) { if binOp.arg2.variables.exists { v => v.sharedVariable } then { Logger.debug("Shared stack object: " + b) Logger.debug("Shared in: " + expr) - val regions = mmm.findSharedStackObject(b.value) + val regions = mmm.findSharedStackObject(b.get.value) Logger.debug("found: " + regions) res ++= regions } else { - val region = mmm.findStackObject(b.value) + val 
region = mmm.findStackObject(b.get.value) if (region.isDefined) { res = res + region.get } @@ -208,9 +213,10 @@ class InterprocSteensgaardAnalysis( res ++= mmm.findStackObject(0) res case v: Variable => - evaluateExpressionWithSSA(expr, constantProp(n), n, reachingDefs).foreach { b => + val b = evaluateExpression(expr, constantProp(n)) + if (b.isDefined) { Logger.debug("BitVecLiteral: " + b) - val region = mmm.findDataObject(b.value) + val region = mmm.findDataObject(b.get.value) if (region.isDefined) { res += region.get } @@ -231,9 +237,10 @@ class InterprocSteensgaardAnalysis( } res case _ => - evaluateExpressionWithSSA(expr, constantProp(n), n, reachingDefs).foreach { b => + val b = evaluateExpression(expr, constantProp(n)) + if (b.isDefined) { Logger.debug("BitVecLiteral: " + b) - val region = mmm.findDataObject(b.value) + val region = mmm.findDataObject(b.get.value) if (region.isDefined) { res += region.get } @@ -267,7 +274,7 @@ class InterprocSteensgaardAnalysis( // X = alloc P: [[X]] = ↑[[alloc-i]] if (directCall.target.name == "malloc") { val alloc = mmm.getHeap(directCall) - val defs = getDefinition(mallocVariable, cmd, reachingDefs, true) + val defs = getDefinition(mallocVariable, cmd, reachingDefs, false) unify(IdentifierVariable(RegisterVariableWrapper(mallocVariable, defs)), PointerRef(AllocVariable(alloc))) } case assign: Assign => @@ -313,7 +320,7 @@ class InterprocSteensgaardAnalysis( case memoryAssign: MemoryAssign => // *X1 = X2: [[X1]] = ↑a ^ [[X2]] = a where a is a fresh term variable val X1_star = exprToRegion(memoryAssign.index, cmd) - val X2 = evaluateExpressionWithSSA(memoryAssign.value, constantProp(cmd), cmd, reachingDefs) + val X2 = evaluateExpression(memoryAssign.value, constantProp(cmd)) // TODO: This is risky as it tries to coerce every value to a region (needed for functionpointer example) val possibleRegions = exprToRegion(memoryAssign.value, cmd) @@ -330,10 +337,10 @@ class InterprocSteensgaardAnalysis( if (!memoryRegionContents.contains(x)) { memoryRegionContents.addOne(x -> mutable.Set()) } - memoryRegionContents(x).addAll(X2) + if X2.isDefined then memoryRegionContents(x).add(X2.get) memoryRegionContents(x).addAll(possibleRegions.filter(r => r != x)) ) - X2.foreach(x => unify(alpha, ExpressionVariable(x))) + if X2.isDefined then unify(alpha, ExpressionVariable(X2.get)) possibleRegions.foreach(x => unify(alpha, ExpressionVariable(x))) case _ => // do nothing TODO: Maybe LocalVar too? 
} diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala index 4779045b0..370804bc3 100644 --- a/src/main/scala/analysis/RegionInjector.scala +++ b/src/main/scala/analysis/RegionInjector.scala @@ -12,7 +12,7 @@ import scala.collection.mutable.ArrayBuffer */ class RegionInjector(domain: mutable.Set[CFGPosition], program: Program, - constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]], + constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], mmm: MemoryModelMap, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], globalOffsets: Map[BigInt, BigInt]) { @@ -68,8 +68,9 @@ class RegionInjector(domain: mutable.Set[CFGPosition], var reducedRegions = Set.empty[MemoryRegion] binExpr.arg1 match { case variable: Variable => - evaluateExpressionWithSSA(binExpr, constantProp(n), n, reachingDefs).foreach { b => - val region = mmm.findDataObject(b.value) + val b = evaluateExpression(binExpr, constantProp(n)) + if (b.isDefined) { + val region = mmm.findDataObject(b.get.value) reducedRegions = reducedRegions ++ region } if (reducedRegions.nonEmpty) { @@ -107,24 +108,26 @@ class RegionInjector(domain: mutable.Set[CFGPosition], println(ctx) exprToRegion(i.rhs, i) } - val results = evaluateExpressionWithSSA(binExpr.arg2, constantProp(n), n, reachingDefs) - for { - b <- results - r <- regions - } { - r match { - case stackRegion: StackRegion => - println(s"StackRegion: ${stackRegion.start}") - println(s"BitVecLiteral: ${b}") - //if (b.size == stackRegion.start.size) { TODO: Double check why this is needed + val result = evaluateExpression(binExpr.arg2, constantProp(n)) + if (result.isDefined) { + val b = result.get + for { + r <- regions + } { + r match { + case stackRegion: StackRegion => + println(s"StackRegion: ${stackRegion.start}") + println(s"BitVecLiteral: ${b}") + //if (b.size == stackRegion.start.size) { TODO: Double check why this is needed val nextOffset = bitVectorOpToBigIntOp(binExpr.op, stackRegion.start, b.value) reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, BitVecLiteral(nextOffset, 64)), n) //} - case dataRegion: DataRegion => - //val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start, globalOffsets), b) - val nextOffset = bitVectorOpToBigIntOp(binExpr.op, dataRegion.start, b.value) - reducedRegions ++= exprToRegion(BitVecLiteral(nextOffset, 64), n) - case _ => + case dataRegion: DataRegion => + //val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start, globalOffsets), b) + val nextOffset = bitVectorOpToBigIntOp(binExpr.op, dataRegion.start, b.value) + reducedRegions ++= exprToRegion(BitVecLiteral(nextOffset, 64), n) + case _ => + } } } } @@ -147,21 +150,22 @@ class RegionInjector(domain: mutable.Set[CFGPosition], mmm.pushContext(IRWalk.procedure(n).name) expr match { // TODO: Stack detection here should be done in a better way or just merged with data case binOp: BinaryExpr if binOp.arg1 == stackPointer => - evaluateExpressionWithSSA(binOp.arg2, constantProp(n), n, reachingDefs).foreach { b => + val b = evaluateExpression(binOp.arg2, constantProp(n)) + if (b.isDefined) { if binOp.arg2.variables.exists { v => v.sharedVariable } then { Logger.debug("Shared stack object: " + b) Logger.debug("Shared in: " + expr) - val regions = mmm.findSharedStackObject(b.value) + val regions = mmm.findSharedStackObject(b.get.value) Logger.debug("found: " + regions) res ++= regions } else { - if (isNegative(b)) { + if 
(isNegative(b.get)) { val region = mmm.findStackObject(0) if (region.isDefined) { res = res + region.get } } - val region = mmm.findStackObject(b.value) + val region = mmm.findStackObject(b.get.value) if (region.isDefined) { res = res + region.get } @@ -172,9 +176,10 @@ class RegionInjector(domain: mutable.Set[CFGPosition], case v: Variable if v == stackPointer => res ++= mmm.findStackObject(0) case v: Variable => - evaluateExpressionWithSSA(expr, constantProp(n), n, reachingDefs).foreach { b => + val b = evaluateExpression(expr, constantProp(n)) + if (b.isDefined) { Logger.debug("BitVecLiteral: " + b) - val region = mmm.findDataObject(b.value) + val region = mmm.findDataObject(b.get.value) if (region.isDefined) { res += region.get } @@ -206,9 +211,10 @@ class RegionInjector(domain: mutable.Set[CFGPosition], case load: MemoryLoad => // treat as a region res ++= exprToRegion(load.index, n) case _ => - evaluateExpressionWithSSA(expr, constantProp(n), n, reachingDefs).foreach { b => + val b = evaluateExpression(expr, constantProp(n)) + if (b.isDefined) { Logger.debug("BitVecLiteral: " + b) - val region = mmm.findDataObject(b.value) + val region = mmm.findDataObject(b.get.value) if (region.isDefined) { res += region.get } diff --git a/src/main/scala/analysis/UtilMethods.scala b/src/main/scala/analysis/UtilMethods.scala index 23cddbf52..a993dc767 100644 --- a/src/main/scala/analysis/UtilMethods.scala +++ b/src/main/scala/analysis/UtilMethods.scala @@ -143,12 +143,12 @@ def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapper } } -def getDefinition(variable: Variable, node: CFGPosition, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], noFilter: Boolean = false): Set[Assign] = { +def getDefinition(variable: Variable, node: CFGPosition, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], noFilter: Boolean = true): Set[Assign] = { val (in, _) = reachingDefs(node) if noFilter then in.getOrElse(variable, Set()) else in.getOrElse(variable, Set()).filterNot(_.rhs.variables.forall(_.name.contains("Unique"))) } -def getUse(variable: Variable, node: CFGPosition, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], noFiler: Boolean = false): Set[Assign] = { +def getUse(variable: Variable, node: CFGPosition, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], noFiler: Boolean = true): Set[Assign] = { val (_, out) = reachingDefs(node) if noFiler then out.getOrElse(variable, Set()) else out.getOrElse(variable, Set()).filterNot(_.rhs.variables.forall(_.name.contains("Unique"))) } diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index ae63a0e88..675316d2b 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -652,10 +652,6 @@ object StaticAnalysis { config.analysisDotPath.foreach(s => writeToFile(cfg.toDot(Output.labeler(regionAccessesAnalysisResults, true), Output.dotIder), s"${s}_RegTo$iteration.dot")) config.analysisResultsPath.foreach(s => writeToFile(printAnalysisResults(cfg, regionAccessesAnalysisResults, iteration), s"${s}_RegTo$iteration.txt")) - Logger.info("[!] Running Constant Propagation with SSA") - val constPropSolverWithSSA = ConstantPropagationSolverWithSSA(IRProgram, reachingDefinitionsAnalysisResults) - val constPropResultWithSSA = constPropSolverWithSSA.analyze() - Logger.info("[!] 
Running MRA") val mraSolver = MemoryRegionAnalysisSolver(IRProgram, globalAddresses, globalOffsets, mergedSubroutines, constPropResult, ANRResult, RNAResult, regionAccessesAnalysisResults, reachingDefinitionsAnalysisResults) val mraResult = mraSolver.analyze() @@ -684,14 +680,14 @@ object StaticAnalysis { mmm.logRegions() Logger.info("[!] Running Steensgaard") - val steensgaardSolver = InterprocSteensgaardAnalysis(IRProgram, constPropResultWithSSA, regionAccessesAnalysisResults, mmm, reachingDefinitionsAnalysisResults, globalOffsets) + val steensgaardSolver = InterprocSteensgaardAnalysis(IRProgram, constPropResult, regionAccessesAnalysisResults, mmm, reachingDefinitionsAnalysisResults, globalOffsets) steensgaardSolver.analyze() val steensgaardResults = steensgaardSolver.pointsTo() val memoryRegionContents = steensgaardSolver.getMemoryRegionContents mmm.logRegions(memoryRegionContents) Logger.info("[!] Injecting regions") - val regionInjector = RegionInjector(domain, IRProgram, constPropResultWithSSA, mmm, reachingDefinitionsAnalysisResults, globalOffsets) + val regionInjector = RegionInjector(domain, IRProgram, constPropResult, mmm, reachingDefinitionsAnalysisResults, globalOffsets) regionInjector.nodeVisitor() Logger.info("[!] Running VSA") From 79b6259e87c56d816233020ffbc3eee0d74cf330 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Fri, 20 Sep 2024 11:56:38 +1000 Subject: [PATCH 055/104] Fixed regression bug for jumptable2 --- src/main/scala/analysis/MemoryModelMap.scala | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index 9242a141f..7a7b9ea1c 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -58,6 +58,10 @@ class MemoryModelMap { case _ => ??? 
} + def regionsOverlap(r1: RangeKey, r2: RangeKey): Boolean = { + r1.start <= r2.end && r2.start <= r1.end + } + region match { case s: StackRegion => var currentStackMap = stackMap @@ -87,8 +91,7 @@ class MemoryModelMap { currentDataMap(RangeKey(offset, maxSize(d) - 1)) = d } else { val currentMaxRange = currentDataMap.keys.maxBy(_.end) - val currentMaxRegion = currentDataMap(currentMaxRange) - if (offset <= currentMaxRange.end) { + if (regionsOverlap(currentMaxRange, RangeKey(offset, maxSize(d) - 1))) { currentDataMap.remove(currentMaxRange) // TODO: this removes previously overlapping parent region (jumptable2 example) which favours more fine grained regions currentDataMap(RangeKey(offset, maxSize(d) - 1)) = d } else { @@ -136,7 +139,7 @@ class MemoryModelMap { def convertMemoryRegions(stackRegionsPerProcedure: mutable.Map[Procedure, mutable.Set[StackRegion]], heapRegions: mutable.Map[DirectCall, HeapRegion], mergeRegions: mutable.Set[Set[MemoryRegion]], externalFunctions: Map[BigInt, String], globalOffsets: Map[BigInt, BigInt], globalAddresses: Map[BigInt, String], globalSizes: Map[String, Int], procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]]): Unit = { // map externalFunctions name, value to DataRegion(name, value) and then sort by value - val reversedExternalFunctionRgns = (externalFunctions ++ globalAddresses).map((offset, name) => resolveInverseGlobalOffset(name, offset, globalOffsets) -> name) + val reversedExternalFunctionRgns = externalFunctions.map((offset, name) => resolveInverseGlobalOffset(name, offset, globalOffsets) -> name) val filteredGlobalOffsets = globalAddresses.filterNot((offset, name) => reversedExternalFunctionRgns.contains(offset)) val externalFunctionRgns = (reversedExternalFunctionRgns ++ filteredGlobalOffsets).map((offset, name) => DataRegion(name, offset, (globalSizes.getOrElse(name, 1).toDouble/8).ceil.toInt)) From 65c672a20b235bd51e3bd6e3e4e59d8c0b424227 Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 23 Sep 2024 12:22:21 +1000 Subject: [PATCH 056/104] fixes --- src/main/scala/analysis/DSAUtility.scala | 102 +++++++++++++---------- src/main/scala/analysis/Local.scala | 7 +- src/test/scala/DSATest.scala | 55 ++++++++---- 3 files changed, 99 insertions(+), 65 deletions(-) diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index 940250eba..a94cb2d29 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -42,6 +42,9 @@ class DSG(val proc: Procedure, val nodes: mutable.Set[DSN] = mutable.Set() + // Points-to relations in this graph, only updated once the analysis is done, + val pointsto: mutable.Map[DSC, Slice] = mutable.Map() + // represent callees in proc val callsites: mutable.Set[CallSite] = mutable.Set() @@ -152,7 +155,7 @@ class DSG(val proc: Procedure, globalMapping.update(AddressRange(relocatedAddress, relocatedAddress + 8), Field(node, 0)) node - node.cells(field)._pointee = Some(Slice(isGlobal(address).get._2._1.cells(0), 0)) + node.cells(field).pointee = Some(Slice(isGlobal(address).get._2._1.cells(0), 0)) address = relocatedAddress } ) @@ -173,7 +176,7 @@ class DSG(val proc: Procedure, var global: Option[(AddressRange, Field)] = None breakable { for (elem <- globalMapping) { - val range = elem._1 // TODO + val range = elem._1 if address >= range.start && (address < range.end || (range.start == range.end && range.end == address)) then global = Some(elem) break @@ -196,6 +199,7 @@ class DSG(val proc: Procedure, */ def 
collectNodes = nodes.clear() + pointsto.clear() nodes.addAll(formals.values.map(_._1.node.get).map(n => find(n).node)) varToCell.values.foreach( value => nodes.addAll(value.values.map(_._1.node.get).map(n => find(n).node)) @@ -208,11 +212,13 @@ class DSG(val proc: Procedure, while queue.nonEmpty do val cur = queue.dequeue() cur.cells.foreach { - case (offset: BigInt, cell: DSC) if cell._pointee.isDefined => + case (offset: BigInt, cell: DSC) if cell.pointee.isDefined => val node = find(cell.getPointee.node).node if !nodes.contains(node) then nodes.add(node) queue.enqueue(node) + assert(!pointsto.contains(cell)) + pointsto.update(cell, find(cell.getPointee)) case _ => } @@ -238,7 +244,9 @@ class DSG(val proc: Procedure, s ++ mapping.foldLeft(Set[DotStruct]()) { (k, n) => val variable = n._1.name - k + DotStruct(s"SSA_${pos.toShortString.slice(1, 9)}_$variable", s"SSA_${pos}_$variable", None, false) + k + DotStruct(s"SSA_${ + if pos.toShortString.startsWith("%") then pos.toShortString.drop(1) else pos.toShortString + }_$variable", s"SSA_${pos}_$variable", None, false) } } @@ -255,20 +263,14 @@ class DSG(val proc: Procedure, s + DotStruct(s"Stack_$offset", s"Stack_$offset", None) } - var arrows = nodes.foldLeft(Set[StructArrow]()) { - (s, node) => - s ++ node.cells.foldLeft(Set[StructArrow]()) { - (k, c) => - val offset = c._1 - val cell = c._2 - if cell._pointee.isDefined then - val pointee = find(cell.getPointee) - s + StructArrow(DotStructElement(node.id.toString, Some(offset.toString)), DotStructElement(pointee.node.id.toString, Some(pointee.cell.offset.toString)), pointee.internalOffset.toString) - else - s - } - } + var arrows = + pointsto.foldLeft(Set[StructArrow]()) { + case (s: Set[StructArrow], (cell: DSC, pointee: Slice)) => + val pointerID = cell.node.get.id.toString + val pointerOffset = cell.offset.toString + s + StructArrow(DotStructElement(pointerID, Some(pointerOffset)), DotStructElement(pointee.node.id.toString, Some(pointee.cell.offset.toString)), pointee.internalOffset.toString) + } arrows ++= formals.foldLeft(Set[StructArrow]()) { (s, n) => @@ -285,7 +287,9 @@ class DSG(val proc: Procedure, (k, n) => val variable = n._1.name val value = find(n._2) - k + StructArrow(DotStructElement(s"SSA_${pos.toShortString.slice(1, 9)}_$variable", None), DotStructElement(value.node.id.toString, Some(value.cell.offset.toString)), value.internalOffset.toString) + k + StructArrow(DotStructElement(s"SSA_${ + if pos.toShortString.startsWith("%") then pos.toShortString.drop(1) else pos.toShortString + }_$variable", None), DotStructElement(value.node.id.toString, Some(value.cell.offset.toString)), value.internalOffset.toString) } } @@ -337,7 +341,7 @@ class DSG(val proc: Procedure, var cell = node.cells.tail.foldLeft(adjust(node.cells.head._2.getPointee)) { (c, field) => val cell = field._2 - val pointee = cell._pointee + val pointee = cell.pointee if pointee.isDefined && adjust(cell.getPointee) == cell then pointToItself = true c @@ -354,7 +358,7 @@ class DSG(val proc: Procedure, cell = mergeCells(cell, collapedCell) - collapedCell._pointee = Some(Slice(collapedCell, 0)) + collapedCell.pointee = Some(Slice(collapedCell, 0)) assert(collapsedNode.cells.size == 1) @@ -398,14 +402,14 @@ class DSG(val proc: Procedure, */ def mergeNeighbours(cell1: DSC, cell2: DSC): DSC = require(cell1.node.equals(cell2.node) && cell1.offset < cell2.offset) - if cell2._pointee.isDefined then - if cell1._pointee.isDefined then + if cell2.pointee.isDefined then + if cell1.pointee.isDefined then val slice1 = 
cell1.getPointee val slice2 = cell2.getPointee val result = mergeCells(adjust(slice1), adjust(slice2)) - cell1._pointee = Some(Slice(result, slice2.internalOffset.max(slice1.internalOffset))) + cell1.pointee = Some(Slice(result, slice2.internalOffset.max(slice1.internalOffset))) else - cell1._pointee = cell2._pointee + cell1.pointee = cell2.pointee val internalOffsetChange = cell2.offset - cell1.offset cell2.node.get.cells.remove(cell2.offset) cell1.growSize((cell2.offset - cell1.offset) + cell2.largestAccessedSize) // might cause another collapse @@ -477,15 +481,15 @@ class DSG(val proc: Procedure, node2.children += (node1 -> 0) node2.allocationRegions.addAll(node1.allocationRegions) // add regions and flags of node 1 to node 2 node2.flags.join(node1.flags) - if node1.cells(0)._pointee.isDefined then // merge the pointees of the two collapsed (single cell) nodes - if node2.cells(0)._pointee.isDefined then + if node1.cells(0).pointee.isDefined then // merge the pointees of the two collapsed (single cell) nodes + if node2.cells(0).pointee.isDefined then val slice1 = node1.cells(0).getPointee val slice2 = node2.cells(0).getPointee val result = mergeCells(adjust(slice1), adjust(slice2)) - node2.cells(0)._pointee = Some(Slice(result, slice1.internalOffset.max(slice2.internalOffset))) + node2.cells(0).pointee = Some(Slice(result, slice1.internalOffset.max(slice2.internalOffset))) else - node2.cells(0)._pointee = node1.cells(0)._pointee -// node1.cells(0)._pointee = None + node2.cells(0).pointee = node1.cells(0).pointee +// node1.cells(0).pointee = None // replace(node1.cells(0), node2.cells(0), 0) solver.unify(node1.term, node2.term, 0) node2.cells(0) @@ -552,7 +556,7 @@ class DSG(val proc: Procedure, (set, cell) => // collect outgoing edges - if cell._pointee.isDefined then + if cell.pointee.isDefined then val pointee = cell.getPointee set + pointee else @@ -560,15 +564,13 @@ class DSG(val proc: Procedure, } // replace outgoing edges if outgoing.size == 1 then - collapsedCell._pointee = Some(outgoing.head) + collapsedCell.pointee = Some(outgoing.head) else if outgoing.size > 1 then val result = outgoing.tail.foldLeft(adjust(outgoing.head)){ (result, pointee) => mergeCells(result, adjust(pointee)) } - - - collapsedCell._pointee = Some(deadjust(result)) + collapsedCell.pointee = Some(deadjust(result)) } solver.unify(node1.term, resultNode.term, 0) @@ -704,7 +706,7 @@ class DSG(val proc: Procedure, val node = queue.dequeue() node.cells.foreach { - case (offset: BigInt, cell: DSC) if cell._pointee.isDefined => + case (offset: BigInt, cell: DSC) if cell.pointee.isDefined => val id = cell.node.get.id val pointee = find(cell.getPointee) val pointeeId = pointee.node.id @@ -712,7 +714,7 @@ class DSG(val proc: Procedure, queue.enqueue(pointee.node) val newNode = pointee.node.cloneSelf(newGraph) idToNode.update(pointeeId, newNode) - idToNode(id).cells(cell.offset)._pointee = Some(Slice(idToNode(pointeeId).cells(pointee.offset), pointee.internalOffset)) + idToNode(id).cells(cell.offset).pointee = Some(Slice(idToNode(pointeeId).cells(pointee.offset), pointee.internalOffset)) case _ => @@ -864,7 +866,7 @@ class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCount cells.foreach { case (offset: BigInt, cell: DSC) => - if cell._pointee.isDefined then + if cell.pointee.isDefined then val pointee = cell.getPointee pointee.node.cloneNode(from, to) // to.pointTo.update(cell, pointee) TODO check this is not necessary @@ -888,23 +890,26 @@ class DSN(val graph: Option[DSG], var size: BigInt = 0, 
val id: Int = NodeCount * @param node the node this cell belongs to * @param offset the offset of the cell */ -case class DSC(node: Option[DSN], offset: BigInt) +class DSC(val node: Option[DSN], val offset: BigInt) { var largestAccessedSize: BigInt = 0 - var _pointee : Option[Slice] = None + // the cell's pointee + var pointee : Option[Slice] = None + // returns the cell's pointee if it has one. + // if not it will create a placeholder, set it as the pointee of this cell and return it def getPointee : Slice = - if _pointee.isEmpty then + if pointee.isEmpty then val node = DSN(Some(this.node.get.graph.get)) - _pointee = Some(Slice(node.cells(0), 0)) + pointee = Some(Slice(node.cells(0), 0)) else - val graph = _pointee.get.node.graph.get - val resolvedPointee = graph.find(graph.adjust(_pointee.get)) + val graph = pointee.get.node.graph.get + val resolvedPointee = graph.find(graph.adjust(pointee.get)) - _pointee = Some(graph.deadjust(resolvedPointee)) - _pointee.get + pointee = Some(graph.deadjust(resolvedPointee)) + pointee.get def growSize(size: BigInt): Boolean = if size > largestAccessedSize then @@ -912,6 +917,13 @@ case class DSC(node: Option[DSN], offset: BigInt) true else false + + override def equals(obj: Any): Boolean = + obj match + case cell:DSC => this.node.equals(cell.node) && this.offset.equals(cell.offset) + case _ => false + + override def toString: String = s"Cell(${if node.isDefined then node.get.toString else "NONE"}, $offset)" } diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala index 464f16364..9500b0671 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/Local.scala @@ -3,6 +3,7 @@ package analysis import analysis.BitVectorEval.{bv2SignedInt, isNegative} import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Endian, Expr, Extract, IntraProcIRCursor, MemoryAssign, MemoryLoad, Procedure, Register, Variable, ZeroExtend, computeDomain, toShortString} import specification.{ExternalFunction, SpecGlobal, SymbolTableEntry} +import util.writeToFile import scala.util.control.Breaks.{break, breakable} import java.math.BigInteger @@ -167,7 +168,7 @@ class Local( val internalOffset = t._2 val node = t._1.node.get val cell = graph.find(node.getCell(offset + internalOffset)) - if cell._pointee.isDefined && graph.find(cell.getPointee._1).equals(result) then + if cell.pointee.isDefined && graph.find(cell.getPointee._1).equals(result) then graph.selfCollapse(node) // assert(graph.pointTo.contains(node.getCell(offset))) TODO result = graph.find(graph.find(node.getCell(offset)).getPointee._1) @@ -328,9 +329,9 @@ class Local( graph.mergeCells(graph.adjust(slice), c) } - print("") - case _ => + + writeToFile(graph.toDot, "test.dot") } def analyze(): DSG = val domain = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString) diff --git a/src/test/scala/DSATest.scala b/src/test/scala/DSATest.scala index efa08b2ec..240f7e329 100644 --- a/src/test/scala/DSATest.scala +++ b/src/test/scala/DSATest.scala @@ -58,7 +58,23 @@ class DSATest extends AnyFunSuite, TestUtil { assert(dsg.adjust(stack8.getPointee).equals(dsg.adjust(dsg.formals(R30)))) // R31 + 8 points to the link register - assert(dsg.adjust(stack32.getPointee).equals(stack24)) // R31 + 32 points to R31 + 24, 00000457 + /* + Position 0000044F: tmp1 := R31 + 24 // Ev(tmp1) = new Node(R31 + 24).0 + implicit normalisation: tmp2 := R31 + 32 // Ev(tmp2) = new Node(R31 + 32).0 + Position 00000457: *tmp2 := tmp1 // merge(Ev(tmp1), 
E(Ev(tmp2))) + Therefore, Node(R31 + 32).0.pointee is merged with Node(R31 + 24).0, making E(Node(R31 + 32).0) == Node(R31 + 24).0 + at position 00000446 Ev(R0) == Malloc_Node == E(Node(R31 + 24).0) we have + mem := mem with [R31 + 0x20, el]:u64 <- R0 + *(R31 + 32) := R0 + merge(Ev(R0), E(Ev(R31+ 32)) + == merge(E(Node(R31 + 24).0), E(Node(R31 + 32).0)) + == merge(E(Node(R31 + 24).0), Node(R31 + 24).0) + which merges make the stack + 24 point to itself + */ + + + // R31 + 32 points to R31 + 24, later set to point to heap but it should point to both ( + assert(dsg.adjust(stack32.getPointee).equals(stack24)) assert(stack24.node.get.collapsed) // 00000497 collapses stack24 concatenation is currently unhandled, any objects referenced in an unhandled operation are collapsed assert(dsg.adjust(stack24.getPointee).equals(stack24)) // 00000466, R31 + 32 and R31 + 24 pointees are merged @@ -250,7 +266,7 @@ class DSATest extends AnyFunSuite, TestUtil { assert(stack32.offset == 16) // stack40 points to a different offset of stack24's node but the analysis can't determine that in the local phase - assert(stack40._pointee.isDefined) + assert(stack40.pointee.isDefined) assert(!stack40.node.get.equals(stack24.node.get)) } @@ -283,7 +299,7 @@ class DSATest extends AnyFunSuite, TestUtil { test("internal merge") { // this is an internal merge (two cells of the same node overlap and are merged together) - val mem = SharedMemory("mem", 10000, 10000) + val mem = SharedMemory("mem", 64, 8) val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) var program = prog( @@ -291,8 +307,8 @@ class DSATest extends AnyFunSuite, TestUtil { block("operations", locAssign1, // R6 = R0 + 4 locAssign2, // R7 = R0 + 5 - MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), - MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), + MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), // *R7 = R1, (*R6 + 1) = R1 + MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), // *R6 = R2 ret ) ) @@ -303,22 +319,27 @@ class DSATest extends AnyFunSuite, TestUtil { val results = RunUtils.staticAnalysis(StaticAnalysisConfig(None, None, None), IRContext(Set.empty, Set.empty, Set.empty, Map.empty, Specification(Set(), Set(), Map(), List(), List(), List(), Set()), program)) val dsg: DSG = results.locals.get(program.mainProcedure) - // object of formals R1 and R2 are written to overlapping fields of the same node? 
causing them to be merged together - assert(dsg.adjust(dsg.formals(R1)).equals(dsg.adjust(dsg.formals(R2)))) - // R6 and R7 address the same cell + + // R6 and R7 address the same cell (overlapping cells in the same node that are merged) assert(dsg.find(dsg.varToCell(locAssign1)(R6)).cell.equals(dsg.find(dsg.varToCell(locAssign2)(R7)).cell)) + + // outgoing edges of R6 and R7 are unified since the cells are merged + // object of formals R1 and R2 are written to overlapping fields of the same node (R6, R7); causing them to be merged together + assert(dsg.adjust(dsg.formals(R1)).equals(dsg.adjust(dsg.formals(R2)))) // however, they address different internal offets in those cells assert(dsg.find(dsg.varToCell(locAssign1)(R6)).internalOffset == 0) assert(dsg.find(dsg.varToCell(locAssign2)(R7)).internalOffset == 1) - // R6 points to input R1 - assert(dsg.adjust(dsg.varToCell(locAssign1)(R6))._pointee.isDefined) + + // Since R6 and R7 are pointing to the same cell (R1 and R2) + // R6 (or R7)'s pointee should be the same as R1 and R2 + assert(dsg.adjust(dsg.varToCell(locAssign1)(R6)).pointee.isDefined) assert(dsg.adjust(dsg.adjust(dsg.varToCell(locAssign1)(R6)).getPointee).equals(dsg.adjust(dsg.formals(R1)))) } test("offsetting from middle of cell to a new cell") { - val mem = SharedMemory("mem", 10000, 10000) + val mem = SharedMemory("mem", 64, 8) val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) val locAssign3 = Assign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(8, 64)), Some("00005")) @@ -347,8 +368,8 @@ class DSATest extends AnyFunSuite, TestUtil { test("offsetting from middle of cell to the same cell") { // similar to above except instead of creating new cell the last assign - // points R7's cell at an internal offset of 8 - val mem = SharedMemory("mem", 10000, 10000) + // points R5's cell at an internal offset of 8 + val mem = SharedMemory("mem", 64, 8) val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) val locAssign3 = Assign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(7, 64)), Some("00005")) @@ -377,13 +398,13 @@ class DSATest extends AnyFunSuite, TestUtil { assert(dsg.find(dsg.varToCell(locAssign1)(R6)).internalOffset == 0) assert(dsg.find(dsg.varToCell(locAssign2)(R7)).internalOffset == 1) assert(dsg.find(dsg.varToCell(locAssign3)(R5)).internalOffset == 8) - assert(dsg.find(dsg.varToCell(locAssign1)(R6)).cell._pointee.isDefined) + assert(dsg.find(dsg.varToCell(locAssign1)(R6)).cell.pointee.isDefined) assert(dsg.find(dsg.find(dsg.varToCell(locAssign1)(R6)).cell.getPointee).equals(dsg.find(dsg.formals(R1)))) } test("internal offset transfer") { // this is a test to check assignments transfer internal offset of slices. 
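    // Editor's sketch (inferred from the test name and the neighbouring tests, not taken
    // from the repo): after R6 := R0 + 4 and R7 := R0 + 5 both registers resolve to the
    // same merged cell with internal offsets 0 and 1; the plain copy R5 := R7 below is
    // then expected to carry R7's slice across unchanged, leaving R5 at internal offset 1
    // rather than resetting it to 0.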
- val mem = SharedMemory("mem", 10000, 10000) + val mem = SharedMemory("mem", 64, 8) val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) val locAssign3 = Assign(R5, R7, Some("00005")) @@ -545,7 +566,7 @@ class DSATest extends AnyFunSuite, TestUtil { assert(stack24.node.get.equals(stack32.node.get)) assert(stack24.offset == 0) assert(stack32.offset == 16) - assert(stack40._pointee.isDefined) + assert(stack40.pointee.isDefined) assert(stack40.node.get.equals(stack24.node.get)) assert(stack40.offset == 32) assert(dsg.find(dsg.stackMapping(40).cells(0)).getPointee.internalOffset == 0) @@ -695,7 +716,7 @@ class DSATest extends AnyFunSuite, TestUtil { assert(stack24.node.get.equals(stack32.node.get)) assert(stack24.offset == 0) assert(stack32.offset == 16) - assert(stack40._pointee.isDefined) + assert(stack40.pointee.isDefined) assert(stack40.node.get.equals(stack24.node.get)) assert(stack40.offset == 32) assert(dsg.find(dsg.stackMapping(40).cells(0)).getPointee.internalOffset == 0) From e6ee6800d29a2245262f588cc04355ba483a6c46 Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 23 Sep 2024 16:22:34 +1000 Subject: [PATCH 057/104] fixes --- src/main/scala/analysis/DSAUtility.scala | 63 ++++++++++--------- src/main/scala/analysis/Local.scala | 24 +++---- .../analysis/SymbolicAccessAnalysis.scala | 2 +- .../analysis/solvers/DSAUnionFindSolver.scala | 60 +++++++++--------- src/main/scala/util/RunUtils.scala | 6 +- src/test/scala/DSATest.scala | 2 +- src/test/scala/LiveVarsAnalysisTests.scala | 1 + 7 files changed, 80 insertions(+), 78 deletions(-) diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index a94cb2d29..b161636f9 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -1,6 +1,6 @@ package analysis -import analysis.solvers.{DSAUnionFindSolver, UnionFindSolver, Var} +import analysis.solvers.{DSAUniTerm, DSAUnionFindSolver, UnionFindSolver, Var} import cfg_visualiser.{DotStruct, DotStructElement, StructArrow, StructDotGraph} import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, IntraProcIRCursor, Literal, Memory, MemoryAssign, MemoryLoad, Procedure, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, begin, computeDomain, toShortString} import specification.{ExternalFunction, SpecGlobal, SymbolTableEntry} @@ -67,7 +67,8 @@ class DSG(val proc: Procedure, * @return updated stack mapping */ private def visitStackAccess(pos: CFGPosition, index: Expr, size: Int, m: Map[BigInt, DSN]) : Map[BigInt, DSN] = - val byteSize = (size.toDouble / 8).ceil.toInt + assert(size % 8 == 0) + val byteSize = size / 8 index match case BinaryExpr(op, arg1: Variable, arg2) if varToSym.contains(pos) && varToSym(pos).contains(arg1) && evaluateExpression(arg2, constProp(pos)).isDefined => @@ -172,13 +173,14 @@ class DSG(val proc: Procedure, // determine if an address is a global and return the corresponding global if it is. 
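 // This patch (057/104) wraps the result in DSAGlobal so callers can destructure the
 // matched address range and field in a single pattern. Editor's sketch, mirroring the
 // Local.scala change in this same commit (addr and size are assumed local values):
 //   graph.isGlobal(addr) match
 //     case Some(DSAGlobal(range, Field(node, internal))) =>
 //       node.addCell(internal + (addr - range.start), size)
 //     case None => // not a global address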
- def isGlobal(address: BigInt): Option[(AddressRange, Field)] = - var global: Option[(AddressRange, Field)] = None + def isGlobal(address: BigInt): Option[DSAGlobal] = + var global: Option[DSAGlobal] = None breakable { for (elem <- globalMapping) { val range = elem._1 + val field = elem._2 if address >= range.start && (address < range.end || (range.start == range.end && range.end == address)) then - global = Some(elem) + global = Some(DSAGlobal(range, field)) break } } @@ -325,8 +327,8 @@ class DSG(val proc: Procedure, def collapseNode(n: DSN): DSN = - val (term, offset) = solver.find(n.term) - val node: DSN = term.asInstanceOf[Derm].node + val (term, offset) = solver.findWithOffset(n.term) + val node: DSN = term.node if !(n.collapsed || find(n).node.collapsed) then @@ -412,12 +414,12 @@ class DSG(val proc: Procedure, cell1.pointee = cell2.pointee val internalOffsetChange = cell2.offset - cell1.offset cell2.node.get.cells.remove(cell2.offset) - cell1.growSize((cell2.offset - cell1.offset) + cell2.largestAccessedSize) // might cause another collapse + cell1.growSize((cell2.offset - cell1.offset).toInt + cell2.largestAccessedSize) // might cause another collapse cell1 // private val parent = mutable.Map[DSC, DSC]() - val solver: DSAUnionFindSolver[UniTerm] = DSAUnionFindSolver() + val solver: DSAUnionFindSolver = DSAUnionFindSolver() /** * wrapper for find functionality of the union-find @@ -425,8 +427,8 @@ class DSG(val proc: Procedure, * @return a field which is the tuple (parent node of the input node, starting offset of the input node in its parent) */ def find(node: DSN) : Field = - val (n, offset) = solver.find(node.term) - val resultNode = n.asInstanceOf[Derm].node + val (n, offset) = solver.findWithOffset(node.term) + val resultNode = n.node Field(resultNode, offset) /** @@ -515,7 +517,7 @@ class DSG(val proc: Procedure, }).sortBy(_._1) var lastOffset: BigInt = -1 - var lastAccess: BigInt = -1 + var lastAccess: Int = -1 // create a new node to represent the unified node val resultNode = DSN(Some(this)) // add nodes flags and regions to the resulting node @@ -536,12 +538,12 @@ class DSG(val proc: Procedure, // compute the cells present in the resulting unified node // a mapping from offsets to the set of old cells which are merged to form a cell in the new unified node // values in the mapping also include the largest access size so far computed for each resulting cell - val resultCells: mutable.Map[BigInt, (Set[DSC], BigInt)] = mutable.Map() + val resultCells: mutable.Map[BigInt, (Set[DSC], Int)] = mutable.Map() cells.foreach { case (offset: BigInt, cell: DSC) => if (lastOffset + lastAccess > offset) || lastOffset == offset then // includes this cell if (offset - lastOffset) + cell.largestAccessedSize > lastAccess then - lastAccess = (offset - lastOffset) + cell.largestAccessedSize + lastAccess = (offset - lastOffset).toInt + cell.largestAccessedSize resultCells.update(lastOffset, (resultCells(lastOffset)._1 + cell, lastAccess)) else lastOffset = offset @@ -550,7 +552,7 @@ class DSG(val proc: Procedure, } resultCells.foreach { - case (offset: BigInt, (cells: Set[DSC], largestAccess: BigInt)) => + case (offset: BigInt, (cells: Set[DSC], largestAccess: Int)) => val collapsedCell = resultNode.addCell(offset, largestAccess) val outgoing: Set[Slice] = cells.foldLeft(Set[Slice]()){ (set, cell) => @@ -582,8 +584,8 @@ class DSG(val proc: Procedure, def adjust(cell: DSC, internalOffset: BigInt): DSC = - val link = solver.find(cell.node.get.term) - val node = link._1.asInstanceOf[Derm].node + 
val link = solver.findWithOffset(cell.node.get.term) + val node = link._1.node val linkOffset = link._2 node.addCell(cell.offset + internalOffset + linkOffset, 0) @@ -776,7 +778,7 @@ class Flags() { */ class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCounter.getCounter) { - val term = Derm(this) + val term = DSAUniTerm(this) val children : mutable.Map[DSN, BigInt] = mutable.Map() // var collapsed = false var flags = Flags() @@ -810,7 +812,7 @@ class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCount cells(offset) - def addCell(offset: BigInt, size: BigInt) : DSC = + def addCell(offset: BigInt, size: Int) : DSC = this.updateSize(offset + size) if collapsed then cells(0) @@ -892,7 +894,7 @@ class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCount */ class DSC(val node: Option[DSN], val offset: BigInt) { - var largestAccessedSize: BigInt = 0 + var largestAccessedSize: Int = 0 // the cell's pointee var pointee : Option[Slice] = None @@ -911,7 +913,7 @@ class DSC(val node: Option[DSN], val offset: BigInt) pointee = Some(graph.deadjust(resolvedPointee)) pointee.get - def growSize(size: BigInt): Boolean = + def growSize(size: Int): Boolean = if size > largestAccessedSize then largestAccessedSize = size true @@ -957,6 +959,15 @@ class CallSite(val call: DirectCall, val graph: DSG) { } } + + +case class DSAGlobal(addressRange: AddressRange, field: Field) { + def start: BigInt = addressRange.start + def end: BigInt = addressRange.end + def node: DSN = field.node + def offset: BigInt = field.offset +} + // global address range case class AddressRange(start: BigInt, end: BigInt) @@ -979,16 +990,6 @@ def unwrapPaddingAndSlicing(expr: Expr): Expr = -/** Terms used in unification. - */ -sealed trait UniTerm - -/** A term variable in the solver - */ -case class Derm(node: DSN) extends UniTerm with Var[UniTerm] { - - override def toString: String = s"Term{${node}}" -} diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala index 9500b0671..56cd767dc 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/Local.scala @@ -105,7 +105,7 @@ class Local( val global = graph.isGlobal(value.get.value) if global.isDefined then val address = value.get.value - val (range: AddressRange, Field(node, internal)) = global.get + val DSAGlobal(range: AddressRange, Field(node, internal)) = global.get val offset = address - range.start node.addCell(internal + offset, size) graph.selfCollapse(node) @@ -244,17 +244,13 @@ class Local( graph.mergeCells(lhsCell, stack.get) else expr match - case BinaryExpr(op, arg1: Variable, arg2) => // Rx = Rx + c + case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) => // Rx = Rx + c val arg2Offset = evaluateExpression(arg2, constProp(n)) if op.equals(BVADD) && arg1.equals(stackPointer) && arg2Offset.isDefined && isNegative(arg2Offset.get) then () // the stack is handled prior to this -// val size = bv2SignedInt(arg2Offset.get) -// val node = DSN(Some(graph)) -// node.allocationRegions.add(StackLocation("Stack_"+proc.name, proc, -size)) -// node.flags.stack = true -// graph.mergeCells(lhsCell, node.cells(0)) - else if /*varToSym.contains(n) && varToSym(n).contains(arg1) && */ arg2Offset.isDefined then + + else if /*varToSym.contains(n) && varToSym(n).contains(arg1) && */ arg2Offset.isDefined then // merge lhs with cell(s) corresponding to (arg1 + arg2) where arg1 is cell and arg2 is an offset val offset = evaluateExpression(arg2, constProp(n)).get.value 
visitPointerArithmeticOperation(n, lhsCell, arg1, 0, false, offset) @@ -266,7 +262,8 @@ class Local( visitPointerArithmeticOperation(n, lhsCell, arg, 0) case MemoryLoad(mem, index, endian, size) => // Rx = Mem[Ry], merge Rx and pointee of Ry (E(Ry)) - val byteSize = (size.toDouble/8).ceil.toInt + assert(size % 8 == 0) + val byteSize = size/8 lhsCell.node.get.flags.read = true global = isGlobal(index, n, byteSize) stack = isStack(index, n) @@ -276,7 +273,7 @@ class Local( graph.mergeCells(lhsCell, graph.adjust(graph.find(stack.get).getPointee)) else index match - case BinaryExpr(op, arg1: Variable, arg2) => + case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) => evaluateExpression(arg2, constProp(n)) match case Some(v) => // assert(varToSym(n).contains(arg1)) @@ -284,6 +281,7 @@ class Local( visitPointerArithmeticOperation(n, lhsCell, arg1, byteSize, true, offset) case None => // assert(varToSym(n).contains(arg1)) + // collapse the result visitPointerArithmeticOperation(n, lhsCell, arg1, byteSize, true, 0, true) case arg: Variable => // assert(varToSym(n).contains(arg)) @@ -297,7 +295,8 @@ class Local( val value: Variable = unwrapPaddingAndSlicing(expr).asInstanceOf[Variable] val index: Expr = unwrapPaddingAndSlicing(ind) reachingDefs(n)(value).foreach(visit) - val byteSize = (size.toDouble/8).ceil.toInt + assert(size % 8 == 0) + val byteSize = size / 8 val global = isGlobal(index, n, byteSize) val stack = isStack(index, n) val addressPointee: DSC = @@ -307,7 +306,7 @@ class Local( graph.adjust(graph.find(stack.get).getPointee) else index match - case BinaryExpr(op, arg1: Variable, arg2) => + case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) => evaluateExpression(arg2, constProp(n)) match case Some(v) => // assert(varToSym(n).contains(arg1)) @@ -315,6 +314,7 @@ class Local( visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg1, byteSize, true, offset) case None => // assert(varToSym(n).contains(arg1)) + // collapse the results visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg1, byteSize, true, 0, true) case arg: Variable => // assert(varToSym(n).contains(arg)) diff --git a/src/main/scala/analysis/SymbolicAccessAnalysis.scala b/src/main/scala/analysis/SymbolicAccessAnalysis.scala index 0cd79400e..16720a747 100644 --- a/src/main/scala/analysis/SymbolicAccessAnalysis.scala +++ b/src/main/scala/analysis/SymbolicAccessAnalysis.scala @@ -95,7 +95,7 @@ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElem case Assign(variable, rhs, maybeString: Option[String]) => val expr = unwrapPaddingAndSlicing(rhs) expr match - case BinaryExpr(op, arg1: Variable, arg2) => + case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) => evaluateExpression(arg2, constProp(n)) match case Some(v) => if op.equals(BVADD) && arg1.equals(stackPointer) && isNegative(v) then diff --git a/src/main/scala/analysis/solvers/DSAUnionFindSolver.scala b/src/main/scala/analysis/solvers/DSAUnionFindSolver.scala index 62d6a5082..85901e3f3 100644 --- a/src/main/scala/analysis/solvers/DSAUnionFindSolver.scala +++ b/src/main/scala/analysis/solvers/DSAUnionFindSolver.scala @@ -1,42 +1,41 @@ package analysis.solvers +import analysis.DSN + import scala.collection.mutable -class DSAUnionFindSolver[A] { +class DSAUnionFindSolver extends UnionFindSolver[UniTerm] { + + val parent = mutable.Map[DSAUniTerm, DSAUniTerm]() + val offsets = mutable.Map[DSAUniTerm, BigInt]() - val parent = mutable.Map[Term[A], Term[A]]() - val offsets = mutable.Map[Term[A], BigInt]() 
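  // Editor's note (illustration only, not part of the patch): DSAUnionFindSolver is a
  // union-find whose union edges carry byte offsets. unify(t1, t2, k) records
  // parent(t1) = t2 at offset k, and findWithOffset compresses paths while summing the
  // offsets along the way. For assumed fresh terms a, b, c:
  //   unify(a, b, 8); unify(b, c, 4)
  //   findWithOffset(a) == (c, 12)   // a lies 8 bytes into b, which lies 4 bytes into c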
+ override def unify(t1: Term[UniTerm], t2: Term[UniTerm]): Unit = + unify(t1.asInstanceOf[DSAUniTerm], t2.asInstanceOf[DSAUniTerm], 0) - def unify(t1: Term[A], t2: Term[A], offset: BigInt): Unit = { + // offset is the offset at which + def unify(t1: DSAUniTerm, t2: DSAUniTerm, offset: BigInt): Unit = { mkSet(t1) mkSet(t2) - val rep1 = find(t1)._1 - val rep2 = find(t2)._1 + val rep1 = findWithOffset(t1)._1 + val rep2 = findWithOffset(t2)._1 if (rep1 == rep2) return (rep1, rep2) match { - case (v1: Var[A], v2: Var[A]) => - mkUnion(v1, v2, offset) - case (v1: Var[A], t2: Term[A]) => - mkUnion(v1, t2, offset) - case (t1: Term[A], v2: Var[A]) => - mkUnion(v2, t1, offset) - case (f1: Cons[A], f2: Cons[A]) if f1.doMatch(f2) => - mkUnion(f1, f2, offset) - f1.args.zip(f2.args).foreach { case (a1, a2) => - unify(a1, a2, offset) - } + case (t1: DSAUniTerm, t2: DSAUniTerm) => + mkUnion(t1, t2, offset) case (x, y) => throw new UnificationFailure(s"Cannot unify $t1 and $t2 (with representatives $x and $y)") } } - def find(t: Term[A]): (Term[A], BigInt) = { + + + def findWithOffset(t: DSAUniTerm): (DSAUniTerm, BigInt) = { mkSet(t) if (parent(t) != t) - val (par, offset) = find(parent(t)) + val (par, offset) = findWithOffset(parent(t)) parent += t -> par offsets += t -> offsets(t).+(offset) @@ -47,26 +46,27 @@ class DSAUnionFindSolver[A] { * We assume `t1` and `t2` to be distinct canonical elements. This implementation does not use * [[https://en.wikipedia.org/wiki/Disjoint-set_data_structure union-by-rank]]. */ - def mkUnion(t1: Term[A], t2: Term[A], offset: BigInt): Unit = + private def mkUnion(t1: DSAUniTerm, t2: DSAUniTerm, offset: BigInt): Unit = parent += t1 -> t2 offsets += t1 -> offset /** Creates an equivalence class for the term `t`, if it does not exists already. */ - def mkSet(t: Term[A]): Unit = + private def mkSet(t: DSAUniTerm): Unit = if (!parent.contains(t)) parent += (t -> t) offsets += (t -> 0) + +} - /** Returns the solution of the solver. Note that the terms in the solution have not yet been closed, i.e. they may - * contain constraint variables. - * - * @return - * a map associating to each variable the representative of its equivalence class - */ - def solution(): Map[Var[A], Term[A]] = - // for each constraint variable, find its canonical representative (using the variable itself as default) - parent.keys.collect { case v: Var[A] => (v, find(v)._1) }.toMap.withDefault(v => v) +/** Terms used in unification. + */ +sealed trait UniTerm +/** A term variable in the solver + */ +case class DSAUniTerm(node: DSN) extends Var[UniTerm] { + override def toString: String = s"Term{${node}}" } + diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 9e0bf23ee..a0db82225 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -705,8 +705,8 @@ object StaticAnalysis { val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = vsaSolver.analyze() Logger.info("[!] Running Interprocedural Live Variables Analysis") - //val interLiveVarsResults = InterLiveVarsAnalysis(IRProgram).analyze() - val interLiveVarsResults = Map[CFGPosition, Map[Variable, TwoElement]]() + val interLiveVarsResults = InterLiveVarsAnalysis(IRProgram).analyze() +// val interLiveVarsResults = Map[CFGPosition, Map[Variable, TwoElement]]() Logger.info("[!] 
Running Parameter Analysis") //val paramResults = ParamAnalysis(IRProgram).analyze() @@ -718,7 +718,7 @@ object StaticAnalysis { IRconstPropResult = newCPResult, memoryRegionResult = mraResult, vsaResult = vsaResult, - interLiveVarsResults = Map.empty, + interLiveVarsResults = interLiveVarsResults, paramResults = Map.empty, steensgaardResults = steensgaardResults, mmmResults = mmm, diff --git a/src/test/scala/DSATest.scala b/src/test/scala/DSATest.scala index 240f7e329..f9b98a585 100644 --- a/src/test/scala/DSATest.scala +++ b/src/test/scala/DSATest.scala @@ -1,4 +1,4 @@ -import analysis.{AddressRange, DSC, DSG, DSN, DataLocation, Derm, HeapLocation} +import analysis.{AddressRange, DSC, DSG, DSN, DataLocation, HeapLocation} import ir.Endian.BigEndian import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, CFGPosition, ConvertToSingleProcedureReturn, DirectCall, Memory, MemoryAssign, MemoryLoad, Register, SharedMemory} import org.scalatest.funsuite.AnyFunSuite diff --git a/src/test/scala/LiveVarsAnalysisTests.scala b/src/test/scala/LiveVarsAnalysisTests.scala index 443dc011f..6643eef19 100644 --- a/src/test/scala/LiveVarsAnalysisTests.scala +++ b/src/test/scala/LiveVarsAnalysisTests.scala @@ -296,6 +296,7 @@ class LiveVarsAnalysisTests extends AnyFunSuite, TestUtil { test("basic_arrays_write") { val result: BASILResult = runExample("basic_arrays_write") val analysisResults = result.analysis.get.interLiveVarsResults + val blocks = result.ir.program.blocks // main has a parameter, R0 should be alive From 851872335db8eb55260d67a151ded4311aca2384 Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 23 Sep 2024 16:33:42 +1000 Subject: [PATCH 058/104] fixes --- src/main/scala/analysis/DSA.scala | 2 +- src/main/scala/analysis/DSAUtility.scala | 6 +++--- src/main/scala/analysis/Local.scala | 6 +++--- .../analysis/SymbolicAccessAnalysis.scala | 20 +++++++++---------- src/main/scala/util/RunUtils.scala | 10 +++++----- 5 files changed, 22 insertions(+), 22 deletions(-) diff --git a/src/main/scala/analysis/DSA.scala b/src/main/scala/analysis/DSA.scala index cd4ece1c5..77ed7b738 100644 --- a/src/main/scala/analysis/DSA.scala +++ b/src/main/scala/analysis/DSA.scala @@ -20,7 +20,7 @@ import scala.collection.mutable * @param params mapping from procedures to their parameters */ class DSA(program: Program, - symResults: Map[CFGPosition, Map[SymbolicAccess, TwoElement]], + symResults: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], globals: Set[SymbolTableEntry], globalOffsets: Map[BigInt, BigInt], externalFunctions: Set[ExternalFunction], diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index b161636f9..f0863f134 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -30,7 +30,7 @@ object NodeCounter { */ class DSG(val proc: Procedure, constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - varToSym: Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]], + varToSym: Map[CFGPosition, Map[Variable, Set[SymbolicAddress]]], globals: Set[SymbolTableEntry], globalOffsets: Map[BigInt, BigInt], externalFunctions: Set[ExternalFunction], val reachingDefs: Map[CFGPosition, Map[Variable, Set[CFGPosition]]], @@ -76,7 +76,7 @@ class DSG(val proc: Procedure, varToSym(pos)(arg1).foldLeft(m) { // go through all the symbolic accesses tied to arg1 at pos (m, sym) => sym match - case SymbolicAccess(accessor, 
StackLocation(regionIdentifier, proc, size), symOffset) => // only consider stack accesses + case SymbolicAddress(accessor, StackLocation(regionIdentifier, proc, size), symOffset) => // only consider stack accesses offset = offset + symOffset createStackMapping(pos.toShortString, offset, m, byteSize) case _ => m @@ -85,7 +85,7 @@ class DSG(val proc: Procedure, varToSym(pos)(arg).foldLeft(m) { (m, sym) => sym match - case SymbolicAccess(accessor, StackLocation(regionIdentifier, proc, size), offset) => + case SymbolicAddress(accessor, StackLocation(regionIdentifier, proc, size), offset) => createStackMapping(pos.toShortString, offset, m, byteSize) case _ => m } diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/Local.scala index 56cd767dc..8c9076f1d 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/Local.scala @@ -23,7 +23,7 @@ import scala.collection.mutable */ class Local( proc: Procedure, - symResults: Map[CFGPosition, Map[SymbolicAccess, TwoElement]], + symResults: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], globals: Set[SymbolTableEntry], globalOffsets: Map[BigInt, BigInt], externalFunctions: Set[ExternalFunction], @@ -40,10 +40,10 @@ class Local( // variables to symbolic access map for each cfg position - val varToSym: Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]] = symResults.foldLeft(Map[CFGPosition, Map[Variable, Set[SymbolicAccess]]]()) { + val varToSym: Map[CFGPosition, Map[Variable, Set[SymbolicAddress]]] = symResults.foldLeft(Map[CFGPosition, Map[Variable, Set[SymbolicAddress]]]()) { (outerMap, syms) => val position = syms._1 - val innerMap = syms._2.foldLeft(Map[Variable, Set[SymbolicAccess]]()) { + val innerMap = syms._2.foldLeft(Map[Variable, Set[SymbolicAddress]]()) { (m, access) => if m.contains(access._1.accessor) then // every variable pointing to a stack region ONLY has one symbolic access associated with it. 
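Editor's note on the rename below: SymbolicAccess becomes SymbolicAddress (and the analysis class SymbolicAccessAnalysis becomes SymbolicAddressAnalysis), but the shape of the dataflow facts is unchanged. A minimal sketch of one such fact, where proc and R6 stand in for a Procedure and Register already in scope (assumed names, not from the repo):

    // R6 holds an address 16 bytes into proc's stack frame of size 32
    val fact = SymbolicAddress(R6, StackLocation(s"Stack_${proc.name}", proc, 32), 16)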
diff --git a/src/main/scala/analysis/SymbolicAccessAnalysis.scala b/src/main/scala/analysis/SymbolicAccessAnalysis.scala index 16720a747..6d99d57b4 100644 --- a/src/main/scala/analysis/SymbolicAccessAnalysis.scala +++ b/src/main/scala/analysis/SymbolicAccessAnalysis.scala @@ -7,8 +7,8 @@ import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, Di import java.math.BigInteger -case class SymbolicAccess(accessor: Variable, symbolicBase: MemoryLocation, offset: BigInt) { - override def toString: String = s"SymbolicAccess($accessor, $symbolicBase, $offset)" +case class SymbolicAddress(accessor: Variable, symbolicBase: MemoryLocation, offset: BigInt) { + override def toString: String = s"SymbolicAddress($accessor, $symbolicBase, $offset)" } trait MemoryLocation { @@ -39,7 +39,7 @@ case class UnknownLocation(override val regionIdentifier: String, proc: Procedur * elements in D are symbolic accesses of the form (variable, symbolic base, concrete offset) * lattice L is a binary lattice with top and bottom */ -trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]]) extends ForwardIDEAnalysis[SymbolicAccess, TwoElement, TwoElementLattice] { +trait SymbolicAddressFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]]) extends ForwardIDEAnalysis[SymbolicAddress, TwoElement, TwoElementLattice] { private val stackPointer = Register("R31", 64) private val linkRegister = Register("R30", 64) @@ -104,12 +104,12 @@ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElem case Left(value) => Map(d -> IdEdge()) case Right(_) => val size = bv2SignedInt(v) - Map(d -> IdEdge(), Left(SymbolicAccess(variable, StackLocation(s"Stack_${procedure(n).name}", procedure(n), -size), 0)) -> ConstEdge(TwoElementTop)) + Map(d -> IdEdge(), Left(SymbolicAddress(variable, StackLocation(s"Stack_${procedure(n).name}", procedure(n), -size), 0)) -> ConstEdge(TwoElementTop)) else d match case Left(value) if value.accessor == arg1 => val offsetUpdate = evaluateExpression(arg2, constProp(n)).get.value - val result: Map[DL, EdgeFunction[TwoElement]] = Map(Left(SymbolicAccess(variable, value.symbolicBase, value.offset + offsetUpdate)) -> ConstEdge(TwoElementTop)) + val result: Map[DL, EdgeFunction[TwoElement]] = Map(Left(SymbolicAddress(variable, value.symbolicBase, value.offset + offsetUpdate)) -> ConstEdge(TwoElementTop)) if value.accessor != variable then result + (d -> IdEdge()) else @@ -120,7 +120,7 @@ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElem case arg:Variable => d match case Left(value) if value.accessor == arg => - val result: Map[DL, EdgeFunction[TwoElement]] = Map(Left(SymbolicAccess(variable, value.symbolicBase, value.offset)) -> ConstEdge(TwoElementTop)) + val result: Map[DL, EdgeFunction[TwoElement]] = Map(Left(SymbolicAddress(variable, value.symbolicBase, value.offset)) -> ConstEdge(TwoElementTop)) if value.accessor != variable then result + (d -> IdEdge()) else @@ -131,7 +131,7 @@ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElem d match case Left(value) if value.accessor == variable => Map() case Left(value) => Map(d -> IdEdge()) - case Right(_) => Map(d -> IdEdge(), Left(SymbolicAccess(variable, UnknownLocation(nextunknownCount, procedure(n)), 0)) -> ConstEdge(TwoElementTop)) + case Right(_) => Map(d -> IdEdge(), Left(SymbolicAddress(variable, UnknownLocation(nextunknownCount, procedure(n)), 0)) -> ConstEdge(TwoElementTop)) case _ => d 
match case Left(value) if value.accessor == variable => Map() @@ -144,9 +144,9 @@ trait SymbolicAccessFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElem val size: BigInt = evaluateExpression(mallocVariable, constProp(n)) match case Some(value) => value.value case None => -1 - Map(d -> IdEdge(), Left(SymbolicAccess(mallocVariable, HeapLocation(nextMallocCount, procedure(n), size), 0)) -> ConstEdge(TwoElementTop)) + Map(d -> IdEdge(), Left(SymbolicAddress(mallocVariable, HeapLocation(nextMallocCount, procedure(n), size), 0)) -> ConstEdge(TwoElementTop)) case _ => Map(d -> IdEdge()) } -class SymbolicAccessAnalysis(program: Program, constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]]) - extends ForwardIDESolver[SymbolicAccess, TwoElement, TwoElementLattice](program), SymbolicAccessFunctions(constProp) \ No newline at end of file +class SymbolicAddressAnalysis(program: Program, constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]]) + extends ForwardIDESolver[SymbolicAddress, TwoElement, TwoElementLattice](program), SymbolicAddressFunctions(constProp) \ No newline at end of file diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index a0db82225..af7d19905 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -61,7 +61,7 @@ case class StaticAnalysisContext( mmmResults: MemoryModelMap, memoryRegionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], - symbolicAccessess: Map[CFGPosition, Map[SymbolicAccess, TwoElement]], + SymbolicAddressess: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], locals: Option[Map[Procedure, DSG]], bus: Option[Map[Procedure, DSG]], tds: Option[Map[Procedure, DSG]], @@ -723,7 +723,7 @@ object StaticAnalysis { steensgaardResults = steensgaardResults, mmmResults = mmm, memoryRegionContents = memoryRegionContents, - symbolicAccessess = Map.empty, + SymbolicAddressess = Map.empty, reachingDefs = reachingDefinitionsAnalysisResults, locals = None, bus = None, @@ -976,8 +976,8 @@ object RunUtils { ) Logger.info("[!] 
Running Symbolic Access Analysis") - val symResults: Map[CFGPosition, Map[SymbolicAccess, TwoElement]] = - SymbolicAccessAnalysis(ctx.program, analysisResult.last.IRconstPropResult).analyze() + val symResults: Map[CFGPosition, Map[SymbolicAddress, TwoElement]] = + SymbolicAddressAnalysis(ctx.program, analysisResult.last.IRconstPropResult).analyze() config.analysisDotPath.foreach(s => writeToFile(toDot(ctx.program, symResults.foldLeft(Map(): Map[CFGPosition, String]) { (m, t) => @@ -1006,7 +1006,7 @@ object RunUtils { steensgaardResults = analysisResult.last.steensgaardResults, mmmResults = analysisResult.last.mmmResults, memoryRegionContents = analysisResult.last.memoryRegionContents, - symbolicAccessess = symResults, // analysisResult.last.symbolicAccessess, + SymbolicAddressess = symResults, // analysisResult.last.SymbolicAddressess, locals = Some(dsa.locals.toMap), bus = Some(dsa.bu.toMap), tds = Some(dsa.td.toMap), From b4fe131e1e6eedcbaea390d841d4e6214988e7c7 Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Tue, 24 Sep 2024 10:07:45 +1000 Subject: [PATCH 059/104] fixes --- src/main/scala/analysis/DSA.scala | 134 +++++++++--------- ...is.scala => SymbolicAddressAnalysis.scala} | 3 +- .../interproc_pointer_arithmetic.adt | 0 .../interproc_pointer_arithmetic.bir | 0 .../interproc_pointer_arithmetic.c | 0 .../interproc_pointer_arithmetic.relf | 0 .../unsafe_pointer_arithmetic.adt | 0 .../unsafe_pointer_arithmetic.bir | 0 .../unsafe_pointer_arithmetic.c | 0 .../unsafe_pointer_arithmetic.relf | 0 src/test/scala/DSATest.scala | 32 ++--- 11 files changed, 86 insertions(+), 83 deletions(-) rename src/main/scala/analysis/{SymbolicAccessAnalysis.scala => SymbolicAddressAnalysis.scala} (97%) rename {examples => src/test/dsa}/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt (100%) rename {examples => src/test/dsa}/interproc_pointer_arithmetic/interproc_pointer_arithmetic.bir (100%) rename {examples => src/test/dsa}/interproc_pointer_arithmetic/interproc_pointer_arithmetic.c (100%) rename {examples => src/test/dsa}/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf (100%) rename {examples => src/test/dsa}/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.adt (100%) rename {examples => src/test/dsa}/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.bir (100%) rename {examples => src/test/dsa}/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.c (100%) rename {examples => src/test/dsa}/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.relf (100%) diff --git a/src/main/scala/analysis/DSA.scala b/src/main/scala/analysis/DSA.scala index 77ed7b738..6101b6311 100644 --- a/src/main/scala/analysis/DSA.scala +++ b/src/main/scala/analysis/DSA.scala @@ -85,73 +85,75 @@ class DSA(program: Program, // bottom up phase while queue.nonEmpty do - val proc = queue.dequeue() + var proc = queue.dequeue() + while !locals.contains(proc) && queue.nonEmpty do proc = queue.dequeue() visited += proc - queue.enqueueAll(CallGraph.pred(proc).diff(visited)) - val buGraph = bu(proc) - - buGraph.callsites.foreach( - callSite => - val callee = callSite.proc - val calleeGraph = locals(callee) //.cloneSelf() - assert(buGraph.globalMapping.keySet.equals(calleeGraph.globalMapping.keySet)) - assert(calleeGraph.formals.keySet.diff(ignoreRegisters).equals(callSite.paramCells.keySet)) - - calleeGraph.globalMapping.foreach { - case (range: AddressRange, Field(node, offset)) => - val newNode = calleeGraph.find(node).node - newNode.cloneNode(calleeGraph, buGraph) - } - - calleeGraph.formals.foreach{ - case 
(variable: Variable, slice: Slice) if !ignoreRegisters.contains(variable) => - assert(callSite.paramCells.contains(variable)) - val node = calleeGraph.find(slice).node - node.cloneNode(calleeGraph, buGraph) - case _ => - } - - assert(writesTo(callee).equals(callSite.returnCells.keySet)) - writesTo(callee).foreach( - reg => - assert(callSite.returnCells.contains(reg)) - val returnCells = calleeGraph.getCells(end(callee), reg).map(calleeGraph.find) - assert(returnCells.nonEmpty) - returnCells.foreach{ - case slice: Slice => - val node = calleeGraph.find(slice).node - node.cloneNode(calleeGraph, buGraph) - } - ) - -// assert(calleeGraph.formals.isEmpty || buGraph.varToCell(begin(callee)).equals(calleeGraph.formals)) - val globalNodes: mutable.Map[Int, DSN] = mutable.Map() - calleeGraph.globalMapping.foreach { - case (range: AddressRange, Field(node: DSN, offset: BigInt)) => - val field = calleeGraph.find(node) - buGraph.mergeCells(buGraph.globalMapping(range)._1.getCell(buGraph.globalMapping(range)._2), - field.node.getCell(field.offset + offset)) - } - - buGraph.varToCell.getOrElse(begin(callee), Map.empty).foreach{ - case (variable: Variable, formal) if !ignoreRegisters.contains(variable) => - val test = buGraph.mergeCells(buGraph.adjust(formal), buGraph.adjust(callSite.paramCells(variable))) - test - case _ => - } - writesTo(callee).foreach( - reg => - val returnCells = buGraph.getCells(end(callee), reg) - // assert(returnCells.nonEmpty) - val result: DSC = returnCells.foldLeft(buGraph.adjust(callSite.returnCells(reg))){ - // - case (c: DSC, ret) => - val test = buGraph.mergeCells(c, buGraph.adjust(ret)) - test - } - ) - ) - buGraph.collectNodes + if locals.contains(proc) then + queue.enqueueAll(CallGraph.pred(proc).diff(visited)) + val buGraph = bu(proc) + + buGraph.callsites.foreach( + callSite => + val callee = callSite.proc + val calleeGraph = locals(callee) //.cloneSelf() + assert(buGraph.globalMapping.keySet.equals(calleeGraph.globalMapping.keySet)) + assert(calleeGraph.formals.keySet.diff(ignoreRegisters).equals(callSite.paramCells.keySet)) + + calleeGraph.globalMapping.foreach { + case (range: AddressRange, Field(node, offset)) => + val newNode = calleeGraph.find(node).node + newNode.cloneNode(calleeGraph, buGraph) + } + + calleeGraph.formals.foreach{ + case (variable: Variable, slice: Slice) if !ignoreRegisters.contains(variable) => + assert(callSite.paramCells.contains(variable)) + val node = calleeGraph.find(slice).node + node.cloneNode(calleeGraph, buGraph) + case _ => + } + + assert(writesTo(callee).equals(callSite.returnCells.keySet)) + writesTo(callee).foreach( + reg => + assert(callSite.returnCells.contains(reg)) + val returnCells = calleeGraph.getCells(end(callee), reg).map(calleeGraph.find) + assert(returnCells.nonEmpty) + returnCells.foreach{ + case slice: Slice => + val node = calleeGraph.find(slice).node + node.cloneNode(calleeGraph, buGraph) + } + ) + + // assert(calleeGraph.formals.isEmpty || buGraph.varToCell(begin(callee)).equals(calleeGraph.formals)) + val globalNodes: mutable.Map[Int, DSN] = mutable.Map() + calleeGraph.globalMapping.foreach { + case (range: AddressRange, Field(node: DSN, offset: BigInt)) => + val field = calleeGraph.find(node) + buGraph.mergeCells(buGraph.globalMapping(range)._1.getCell(buGraph.globalMapping(range)._2), + field.node.getCell(field.offset + offset)) + } + + buGraph.varToCell.getOrElse(begin(callee), Map.empty).foreach{ + case (variable: Variable, formal) if !ignoreRegisters.contains(variable) => + val test = 
buGraph.mergeCells(buGraph.adjust(formal), buGraph.adjust(callSite.paramCells(variable))) + test + case _ => + } + writesTo(callee).foreach( + reg => + val returnCells = buGraph.getCells(end(callee), reg) + // assert(returnCells.nonEmpty) + val result: DSC = returnCells.foldLeft(buGraph.adjust(callSite.returnCells(reg))){ + // + case (c: DSC, ret) => + val test = buGraph.mergeCells(c, buGraph.adjust(ret)) + test + } + ) + ) + buGraph.collectNodes // bottom up phase finished // clone bu graphs to top-down graphs domain.foreach( diff --git a/src/main/scala/analysis/SymbolicAccessAnalysis.scala b/src/main/scala/analysis/SymbolicAddressAnalysis.scala similarity index 97% rename from src/main/scala/analysis/SymbolicAccessAnalysis.scala rename to src/main/scala/analysis/SymbolicAddressAnalysis.scala index 6d99d57b4..026cbf643 100644 --- a/src/main/scala/analysis/SymbolicAccessAnalysis.scala +++ b/src/main/scala/analysis/SymbolicAddressAnalysis.scala @@ -37,7 +37,8 @@ case class UnknownLocation(override val regionIdentifier: String, proc: Procedur * environment transformers for SAA or symbolic access analysis * Combination of reaching definitions and constant propagation * elements in D are symbolic accesses of the form (variable, symbolic base, concrete offset) - * lattice L is a binary lattice with top and bottom + * lattice L is a binary lattice with top being the definition is valid (alive) and bottom being + * the definition is dead or no longer affects the environment */ trait SymbolicAddressFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]]) extends ForwardIDEAnalysis[SymbolicAddress, TwoElement, TwoElementLattice] { diff --git a/examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt b/src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt similarity index 100% rename from examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt rename to src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt diff --git a/examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.bir b/src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.bir similarity index 100% rename from examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.bir rename to src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.bir diff --git a/examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.c b/src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.c similarity index 100% rename from examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.c rename to src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.c diff --git a/examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf b/src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf similarity index 100% rename from examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf rename to src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf diff --git a/examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.adt b/src/test/dsa/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.adt similarity index 100% rename from examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.adt rename to src/test/dsa/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.adt diff --git a/examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.bir b/src/test/dsa/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.bir 
similarity index 100% rename from examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.bir rename to src/test/dsa/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.bir diff --git a/examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.c b/src/test/dsa/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.c similarity index 100% rename from examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.c rename to src/test/dsa/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.c diff --git a/examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.relf b/src/test/dsa/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.relf similarity index 100% rename from examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.relf rename to src/test/dsa/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.relf diff --git a/src/test/scala/DSATest.scala b/src/test/scala/DSATest.scala index f9b98a585..47d99f285 100644 --- a/src/test/scala/DSATest.scala +++ b/src/test/scala/DSATest.scala @@ -178,8 +178,8 @@ class DSATest extends AnyFunSuite, TestUtil { val results = RunUtils.loadAndTranslate( BASILConfig( loading = ILLoadingConfig( - inputFile = "examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.adt", - relfFile = "examples/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.relf", + inputFile = "src/test/dsa/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.adt", + relfFile = "src/test/dsa/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.relf", specFile = None, dumpIL = None, ), @@ -239,8 +239,8 @@ class DSATest extends AnyFunSuite, TestUtil { val results = RunUtils.loadAndTranslate( BASILConfig( loading = ILLoadingConfig( - inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", - relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", + inputFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", + relfFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", specFile = None, dumpIL = None, ), @@ -274,8 +274,8 @@ class DSATest extends AnyFunSuite, TestUtil { val results = RunUtils.loadAndTranslate( BASILConfig( loading = ILLoadingConfig( - inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", - relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", + inputFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", + relfFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", specFile = None, dumpIL = None, ), @@ -513,8 +513,8 @@ class DSATest extends AnyFunSuite, TestUtil { val results = RunUtils.loadAndTranslate( BASILConfig( loading = ILLoadingConfig( - inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", - relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", + inputFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", + relfFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", specFile = None, dumpIL = None, ), @@ -539,8 +539,8 @@ class DSATest extends AnyFunSuite, TestUtil { val results = RunUtils.loadAndTranslate( BASILConfig( loading = ILLoadingConfig( - inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", - relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", + inputFile = 
"src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", + relfFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", specFile = None, dumpIL = None, ), @@ -578,7 +578,7 @@ class DSATest extends AnyFunSuite, TestUtil { // top down tests ignore("top down jumptable2 main") { - // no changes should be made from previous phase + // no changes should be made from previous phase val results = RunUtils.loadAndTranslate( BASILConfig( loading = ILLoadingConfig( @@ -620,7 +620,7 @@ class DSATest extends AnyFunSuite, TestUtil { // bu assert(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)).getPointee.node.collapsed) - + } ignore("top down jumptable2 callees") { @@ -662,8 +662,8 @@ class DSATest extends AnyFunSuite, TestUtil { val results = RunUtils.loadAndTranslate( BASILConfig( loading = ILLoadingConfig( - inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", - relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", + inputFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", + relfFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", specFile = None, dumpIL = None, ), @@ -691,8 +691,8 @@ class DSATest extends AnyFunSuite, TestUtil { val results = RunUtils.loadAndTranslate( BASILConfig( loading = ILLoadingConfig( - inputFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", - relfFile = "examples/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", + inputFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", + relfFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", specFile = None, dumpIL = None, ), From 4b634c3eebd58aea18280574ab1e5f5245fbea30 Mon Sep 17 00:00:00 2001 From: l-kent Date: Thu, 26 Sep 2024 10:49:31 +1000 Subject: [PATCH 060/104] cleanup analyses + remove unused 'sharedVariable' in Variable --- .../InterprocSteensgaardAnalysis.scala | 42 +++----- src/main/scala/analysis/MemoryModelMap.scala | 6 +- .../scala/analysis/MemoryRegionAnalysis.scala | 10 +- src/main/scala/analysis/RegionInjector.scala | 20 ++-- src/main/scala/analysis/SSAForm.scala | 100 ------------------ src/main/scala/ir/Expr.scala | 3 +- src/main/scala/util/RunUtils.scala | 2 +- 7 files changed, 30 insertions(+), 153 deletions(-) delete mode 100644 src/main/scala/analysis/SSAForm.scala diff --git a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala index 82e8dc8b8..560b3f5ba 100644 --- a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala +++ b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala @@ -39,7 +39,6 @@ case class RegisterWrapperEqualSets(variable: Variable, assigns: Set[Assign]) { class InterprocSteensgaardAnalysis( program: Program, constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - regionAccesses: Map[CFGPosition, Map[RegisterVariableWrapper, FlatElement[Expr]]], mmm: MemoryModelMap, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], globalOffsets: Map[BigInt, BigInt]) extends Analysis[Any] { @@ -192,17 +191,9 @@ class InterprocSteensgaardAnalysis( case binOp: BinaryExpr if binOp.arg1 == stackPointer => val b = evaluateExpression(binOp.arg2, constantProp(n)) if (b.isDefined) { - if binOp.arg2.variables.exists { v => v.sharedVariable } then { - Logger.debug("Shared 
stack object: " + b) - Logger.debug("Shared in: " + expr) - val regions = mmm.findSharedStackObject(b.get.value) - Logger.debug("found: " + regions) - res ++= regions - } else { - val region = mmm.findStackObject(b.get.value) - if (region.isDefined) { - res = res + region.get - } + val region = mmm.findStackObject(b.get.value) + if (region.isDefined) { + res = res + region.get } } res @@ -415,13 +406,22 @@ case class ExpressionVariable(expr: MemoryRegion | Expr) extends StTerm with Var /** A fresh term variable. */ -case class FreshVariable(var id: Int = 0) extends StTerm with Var[StTerm] { +case class FreshVariable(id: Int) extends StTerm with Var[StTerm] { + override def toString: String = s"x$id" +} - id = Fresh.next() +object FreshVariable { + var n = 0 - override def toString: String = s"x$id" + def next(): Int = { + n += 1 + n + } + + def apply(): FreshVariable = FreshVariable(next()) } + /** A constructor term that represents a pointer to another term. */ case class PointerRef(of: Term[StTerm]) extends StTerm with Cons[StTerm] { @@ -431,16 +431,4 @@ case class PointerRef(of: Term[StTerm]) extends StTerm with Cons[StTerm] { def subst(v: Var[StTerm], t: Term[StTerm]): Term[StTerm] = PointerRef(of.subst(v, t)) override def toString: String = s"$of" -} - -/** Counter for producing fresh IDs. - */ -object Fresh { - - var n = 0 - - def next(): Int = { - n += 1 - n - } } \ No newline at end of file diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index 7a7b9ea1c..73afdcf89 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -326,7 +326,7 @@ class MemoryModelMap { } def getAllHeapRegions: Set[HeapRegion] = { - heapMap.values.toSet.map(returnRegion) + heapMap.values.toSet.map(returnRegion) } def getAllRegions: Set[MemoryRegion] = { @@ -456,11 +456,11 @@ case class StackRegion(override val regionIdentifier: String, start: BigInt, par override def toString: String = s"Stack($regionIdentifier, $start, ${parent.name}, $subAccesses)" } -case class HeapRegion(override val regionIdentifier: String, size: BigInt, parent: Procedure) extends MemoryRegion { +case class HeapRegion(override val regionIdentifier: String, size: Int, parent: Procedure) extends MemoryRegion { override def toString: String = s"Heap($regionIdentifier, $size)" } -case class DataRegion(override val regionIdentifier: String, start: BigInt, size: BigInt) extends MemoryRegion { +case class DataRegion(override val regionIdentifier: String, start: BigInt, size: Int) extends MemoryRegion { override def toString: String = s"Data($regionIdentifier, $start)" } diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index 3653f5cc4..9e5deb071 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -42,14 +42,13 @@ trait MemoryRegionAnalysis(val program: Program, */ private def poolMaster(expr: BigInt, stackBase: Procedure, subAccess: BigInt): StackRegion = { val stackPool = stackMap.getOrElseUpdate(stackBase, mutable.HashMap()) - var region: StackRegion = null - if (stackPool.contains(expr)) { - region = stackPool(expr) + val region = if (stackPool.contains(expr)) { + stackPool(expr) } else { val newRegion = StackRegion(nextStackCount(), expr, stackBase) addReturnStack(stackBase, newRegion) stackPool += (expr -> newRegion) - region = newRegion + newRegion } 
region.subAccesses.add((subAccess.toDouble/8).ceil.toInt) region @@ -230,8 +229,7 @@ trait MemoryRegionAnalysis(val program: Program, if (directCall.target.name == "malloc") { evaluateExpression(mallocVariable, constantProp(n)) match { case Some(b: BitVecLiteral) => - val negB = if isNegative(b) then b.value - BigInt(2).pow(b.size) else b.value - val newHeapRegion = HeapRegion(nextMallocCount(), negB, IRWalk.procedure(n)) + val newHeapRegion = HeapRegion(nextMallocCount(), b.value.toInt, IRWalk.procedure(n)) addReturnHeap(directCall, newHeapRegion) regionLattice.lub(s, Set(newHeapRegion)) case None => s diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala index 11ed03bd8..59a5614bf 100644 --- a/src/main/scala/analysis/RegionInjector.scala +++ b/src/main/scala/analysis/RegionInjector.scala @@ -152,24 +152,16 @@ class RegionInjector(domain: mutable.Set[CFGPosition], case binOp: BinaryExpr if binOp.arg1 == stackPointer => val b = evaluateExpression(binOp.arg2, constantProp(n)) if (b.isDefined) { - if binOp.arg2.variables.exists { v => v.sharedVariable } then { - Logger.debug("Shared stack object: " + b) - Logger.debug("Shared in: " + expr) - val regions = mmm.findSharedStackObject(b.get.value) - Logger.debug("found: " + regions) - res ++= regions - } else { - if (isNegative(b.get)) { - val region = mmm.findStackObject(0) - if (region.isDefined) { - res = res + region.get - } - } - val region = mmm.findStackObject(b.get.value) + if (isNegative(b.get)) { + val region = mmm.findStackObject(0) if (region.isDefined) { res = res + region.get } } + val region = mmm.findStackObject(b.get.value) + if (region.isDefined) { + res = res + region.get + } } case binaryExpr: BinaryExpr => res ++= reducibleToRegion(binaryExpr, n) diff --git a/src/main/scala/analysis/SSAForm.scala b/src/main/scala/analysis/SSAForm.scala deleted file mode 100644 index 044780a6b..000000000 --- a/src/main/scala/analysis/SSAForm.scala +++ /dev/null @@ -1,100 +0,0 @@ -//package analysis -// -//import analysis.* -//import ir.{SignExtend, *} -//import util.Logger -// -//import scala.collection.mutable -// -///** Set-Based SSA -// * - Each variable has a set of versions -// * - New assignments create new versions and replaces any new versions -// * -// * NOTE: This approach does not make an attempt to handle loops -// */ -//class SSAForm(program: Program) { -// -// private val varMaxTracker = mutable.HashMap[String, Int]() -// private val blockBasedMappings = mutable.HashMap[(Block, String), mutable.Set[Int]]().withDefault(_ => mutable.Set()) -// private val context = mutable.HashMap[(Procedure, String), mutable.Set[Int]]().withDefault(_ => mutable.Set()) -// private def getMax(varName: String): Int = -// val ret = varMaxTracker.getOrElse(varName, 0) -// varMaxTracker(varName) = ret + 1 -// ret -// -// private def transformVariables(vars: Set[Variable], block: Block, proc: Procedure): Unit = { -// vars.foreach { v => -// if (context.contains((proc, v.name))) { -// v.sharedVariable = true -// } -// v.ssa_id.clear() -// val contextResult = context.getOrElseUpdate((proc, v.name), mutable.Set(getMax(v.name))) -// v.ssa_id.addAll(blockBasedMappings.getOrElseUpdate((block, v.name), contextResult)) -// } -// } -// -// def applySSA(): Unit = { -// for (proc <- program.procedures) { -// val visitedBlocks = mutable.Set[Block]() -// val stack = mutable.Stack[Block]() -// -// // Start with the entry block -// if (proc.entryBlock.isDefined) { -// stack.push(proc.entryBlock.get) -// } -// -// 
while (stack.nonEmpty) { -// val currentBlock = stack.pop() -// -// if (!visitedBlocks.contains(currentBlock)) { -// visitedBlocks.add(currentBlock) -// -// for (stmt <- currentBlock.statements) { -// Logger.debug(stmt) -// stmt match { -// case localAssign: LocalAssign => -// transformVariables(localAssign.rhs.variables, currentBlock, proc) -// val maxVal = varMaxTracker.getOrElseUpdate(localAssign.lhs.name, 0) -// blockBasedMappings((currentBlock, localAssign.lhs.name)) = mutable.Set(maxVal) -// -// localAssign.lhs.ssa_id.clear() -// localAssign.lhs.ssa_id.addAll(blockBasedMappings((currentBlock, localAssign.lhs.name))) -// -// varMaxTracker(localAssign.lhs.name) = blockBasedMappings((currentBlock, localAssign.lhs.name)).max + 1 -// -// case memoryAssign: MemoryAssign => -// transformVariables(memoryAssign.rhs.variables, currentBlock, proc) -// -// case assume: Assume => -// transformVariables(assume.body.variables, currentBlock, proc) -// // no required for analyses -// case assert: Assert => -// transformVariables(assert.body.variables, currentBlock, proc) -// // no required for analyses -// case _ => throw new RuntimeException("No SSA form for " + stmt.getClass + " yet") -// } -// } -// currentBlock.jump match { -// case directCall: DirectCall => -// // TODO: transfers the whole context but it could be using ANR and RNA to transfer only the relevant context -// varMaxTracker.keys.foreach { varr => -// //context((directCall.target, varr)) = context((directCall.target, varr)) ++ blockBasedMappings(block, varr) -// context.getOrElseUpdate((directCall.target, varr), mutable.Set()) ++= blockBasedMappings((currentBlock, varr)) -// } -// case indirectCall: IndirectCall => -// transformVariables(indirectCall.target.variables, currentBlock, proc) -// case goTo: GoTo => -// for { -// b <- goTo.targets -// varr <- varMaxTracker.keys -// } { -// blockBasedMappings((b, varr)) ++= blockBasedMappings(currentBlock, varr) -// } -// } -// // Push unvisited successors onto the stack -// stack.pushAll(currentBlock.nextBlocks) -// } -// } -// } -// } -//} diff --git a/src/main/scala/ir/Expr.scala b/src/main/scala/ir/Expr.scala index 6a7c862cd..43fde843b 100644 --- a/src/main/scala/ir/Expr.scala +++ b/src/main/scala/ir/Expr.scala @@ -336,7 +336,6 @@ sealed trait Global sealed trait Variable extends Expr { val name: String val irType: IRType - var sharedVariable: Boolean = false override def getType: IRType = irType override def variables: Set[Variable] = Set(this) @@ -365,7 +364,7 @@ case class Register(override val name: String, size: Int) extends Variable with case class LocalVar(override val name: String, override val irType: IRType) extends Variable { override def toGamma: BVar = BVariable(s"Gamma_$name", BoolBType, Scope.Local) override def toBoogie: BVar = BVariable(s"$name", irType.toBoogie, Scope.Local) - override def toString: String = s"LocalVar(${name}_$sharedVariable, $irType)" + override def toString: String = s"LocalVar(${name}, $irType)" override def acceptVisit(visitor: Visitor): Variable = visitor.visitLocalVar(this) } diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index c8eb7e306..185018cdc 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -402,7 +402,7 @@ object StaticAnalysis { mmm.logRegions() Logger.debug("[!] 
Running Steensgaard") - val steensgaardSolver = InterprocSteensgaardAnalysis(IRProgram, constPropResult, regionAccessesAnalysisResults, mmm, reachingDefinitionsAnalysisResults, globalOffsets) + val steensgaardSolver = InterprocSteensgaardAnalysis(IRProgram, constPropResult, mmm, reachingDefinitionsAnalysisResults, globalOffsets) steensgaardSolver.analyze() val steensgaardResults = steensgaardSolver.pointsTo() val memoryRegionContents = steensgaardSolver.getMemoryRegionContents From cd9fe8b179bc442e87ea047931b84104bfb320a8 Mon Sep 17 00:00:00 2001 From: l-kent Date: Thu, 26 Sep 2024 10:58:20 +1000 Subject: [PATCH 061/104] zero out bss sections --- src/main/scala/translating/BAPToIR.scala | 6 +++++- src/main/scala/translating/GTIRBToIR.scala | 21 +++++++++++++-------- 2 files changed, 18 insertions(+), 9 deletions(-) diff --git a/src/main/scala/translating/BAPToIR.scala b/src/main/scala/translating/BAPToIR.scala index ae501c277..a4b3aba19 100644 --- a/src/main/scala/translating/BAPToIR.scala +++ b/src/main/scala/translating/BAPToIR.scala @@ -64,7 +64,11 @@ class BAPToIR(var program: BAPProgram, mainAddress: BigInt) { val memorySections: ArrayBuffer[MemorySection] = ArrayBuffer() for (m <- program.memorySections) { - val bytes = m.bytes.map(_.toIR) + val bytes = if (m.name == ".bss" && m.bytes.isEmpty) { + for (_ <- 0 until m.size) yield BitVecLiteral(0, 8) + } else { + m.bytes.map(_.toIR) + } memorySections.append(MemorySection(m.name, m.address, m.size, bytes)) } diff --git a/src/main/scala/translating/GTIRBToIR.scala b/src/main/scala/translating/GTIRBToIR.scala index 7bbe493b4..fab10dc3b 100644 --- a/src/main/scala/translating/GTIRBToIR.scala +++ b/src/main/scala/translating/GTIRBToIR.scala @@ -202,15 +202,20 @@ class GTIRBToIR(mods: Seq[Module], parserMap: immutable.Map[String, Array[Array[ val initialMemory: ArrayBuffer[MemorySection] = ArrayBuffer() sections.map {elem => - val bytestoInt = elem.byteIntervals.head.contents.toByteArray.map(byte => BigInt(byte)) - val bytes = bytestoInt.map {byte => - if (byte < 0) { - BitVecLiteral(byte + (BigInt(1) << 8), 8) - } else { - BitVecLiteral(byte, 8) - } + val bytesToInt = elem.byteIntervals.head.contents.toByteArray.map(byte => BigInt(byte)) + val size = elem.byteIntervals.head.size.toInt + val bytes = if (elem.name == ".bss" && bytesToInt.isEmpty) { + for (_ <- 0 until size) yield BitVecLiteral(0, 8) + } else { + bytesToInt.map { byte => + if (byte < 0) { + BitVecLiteral(byte + (BigInt(1) << 8), 8) + } else { + BitVecLiteral(byte, 8) + } + }.toSeq } - val section = MemorySection(elem.name, elem.byteIntervals.head.address.toInt, elem.byteIntervals.head.size.toInt, bytes.toSeq) + val section = MemorySection(elem.name, BigInt(elem.byteIntervals.head.address), size, bytes) initialMemory += section } From 264fca0a3e2ec02ea27370dd9cba852bb79ee84b Mon Sep 17 00:00:00 2001 From: l-kent Date: Thu, 26 Sep 2024 11:00:33 +1000 Subject: [PATCH 062/104] replace memory accesses in boogie pre/post conditions with identified memory regions --- src/main/scala/analysis/RegionInjector.scala | 303 +++++++++++-------- src/main/scala/ir/Interpreter.scala | 4 +- src/main/scala/ir/Program.scala | 51 +++- src/main/scala/ir/dsl/DSL.scala | 6 +- src/main/scala/translating/BAPToIR.scala | 8 +- src/main/scala/translating/GTIRBToIR.scala | 13 +- src/main/scala/translating/IRToBoogie.scala | 36 ++- src/main/scala/util/RunUtils.scala | 12 +- 8 files changed, 269 insertions(+), 164 deletions(-) diff --git a/src/main/scala/analysis/RegionInjector.scala 
b/src/main/scala/analysis/RegionInjector.scala index 59a5614bf..0b31464ef 100644 --- a/src/main/scala/analysis/RegionInjector.scala +++ b/src/main/scala/analysis/RegionInjector.scala @@ -10,18 +10,111 @@ import scala.collection.mutable.ArrayBuffer /** * Replaces the region access with the calculated memory region. */ -class RegionInjector(domain: mutable.Set[CFGPosition], - program: Program, + +class MergedRegion(var name: String, val subregions: mutable.Set[MemoryRegion]) + +class RegionInjector(program: Program, constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], mmm: MemoryModelMap, - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], - globalOffsets: Map[BigInt, BigInt]) { + reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])] + ) { private val stackPointer = Register("R31", 64) + val accessToRegion = mutable.Map[Statement, Set[MemoryRegion]]() + val loadToMemory = mutable.Map[Statement, Memory]() + val mergedRegions = mutable.Map[MemoryRegion, MergedRegion]() + def nodeVisitor(): Unit = { - for (elem <- domain) {localTransfer(elem)} - program.initialMemory = transformMemorySections(program.initialMemory) - program.readOnlyMemory = transformMemorySections(program.readOnlyMemory) + // visit reachable procedures + val queue = mutable.Queue[Procedure]() + val visited = mutable.Set[Procedure]() + queue.enqueue(program.mainProcedure) + + while (queue.nonEmpty) { + val procedure = queue.dequeue() + for { + b <- procedure.blocks + s <- b.statements + } { + visitStatement(s) + } + visited.add(procedure) + for (call <- procedure.calls) { + if (!queue.contains(call) && !visited.contains(call)) { + queue.enqueue(call) + } + } + } + + for (access <- accessToRegion.keys) { + val regions = accessToRegion(access) + if (regions.isEmpty) { + //throw Exception("no regions found for " + access) + } else { + mergeRegions(regions) + } + } + + // rename all regions + renameMemory() + + transformMemorySections(program) + } + + def mergeRegions(regions: Set[MemoryRegion]): Unit = { + // TODO need to check that all regions are the same type + val oldMergedRegions = regions.flatMap(r => mergedRegions.get(r)) + if (oldMergedRegions.nonEmpty) { + // TODO rename in sensible deterministic way + val oldRegion = oldMergedRegions.head + for (o <- oldMergedRegions.tail) { + oldRegion.subregions.addAll(o.subregions) + } + oldRegion.subregions.addAll(regions) + for (o <- oldRegion.subregions) { + mergedRegions(o) = oldRegion + } + } else { + // TODO give sensible deterministic name + val mergedRegion = MergedRegion(regions.head.regionIdentifier, mutable.Set()) + mergedRegion.subregions.addAll(regions) + for (m <- mergedRegion.subregions) { + mergedRegions(m) = mergedRegion + } + } + } + + def renameMemory(): Unit = { + for (access <- accessToRegion.keys) { + // all regions associated with an access should have same merged region so no need to check others + val regions = accessToRegion(access) + if (regions.nonEmpty) { + val regionsHead = regions.head + val mergedRegion = mergedRegions(regionsHead) + + access match { + case store: MemoryAssign => + val newMemory = replaceMemory(store.mem, regionsHead, mergedRegion) + store.mem = newMemory + case _ => + val newMemory = replaceMemory(loadToMemory(access), regionsHead, mergedRegion) + val renamer = RegionRenamer(newMemory) + renamer.visitStatement(access) + } + } + + } + } + + def replaceMemory(memory: Memory, region: MemoryRegion, mergedRegion: MergedRegion): Memory = { + 
region match { + case _: StackRegion => + StackMemory(mergedRegion.name, memory.addressSize, memory.valueSize) + case _: DataRegion => + SharedMemory(mergedRegion.name, memory.addressSize, memory.valueSize) + case _: HeapRegion => + SharedMemory(mergedRegion.name, memory.addressSize, memory.valueSize) + } } /** @@ -38,7 +131,7 @@ class RegionInjector(domain: mutable.Set[CFGPosition], val tableAddress = globalOffsets.getOrElse(address, address) // this condition checks if the address is not layered and returns if it is not if (tableAddress != address && !globalOffsets.contains(tableAddress)) { - return BitVecLiteral(address, 64) + BitVecLiteral(address, 64) } BitVecLiteral(tableAddress, 64) } @@ -77,16 +170,13 @@ class RegionInjector(domain: mutable.Set[CFGPosition], return reducedRegions } val ctx = getUse(variable, n, reachingDefs) - for (i <- ctx) { - if (i != n) { // handles loops (ie. R19 = R19 + 1) %00000662 in jumptable2 - val regions = i.rhs match { - case loadL: MemoryLoad => - val foundRegions = exprToRegion(loadL.index, i) - val toReturn = mutable.Set[MemoryRegion]().addAll(foundRegions) - for { - f <- foundRegions - } { - // TODO: Must enable this (probably need to calculate those contents beforehand) + for (i <- ctx if i != n) { // handles loops (ie. R19 = R19 + 1) + val regions = i.rhs match { + case loadL: MemoryLoad => + val foundRegions = exprToRegion(loadL.index, i) + val toReturn = mutable.Set[MemoryRegion]().addAll(foundRegions) + for (f <- foundRegions) { + // TODO: Must enable this (probably need to calculate those contents beforehand) // if (memoryRegionContents.contains(f)) { // memoryRegionContents(f).foreach { // case b: BitVecLiteral => @@ -99,36 +189,31 @@ class RegionInjector(domain: mutable.Set[CFGPosition], // toReturn.remove(f) // } // } - } - toReturn.toSet - case _: BitVecLiteral => - Set.empty[MemoryRegion] - case _ => - //println(s"Unknown expression: ${i}") - //println(ctx) - exprToRegion(i.rhs, i) - } - val result = evaluateExpression(binExpr.arg2, constantProp(n)) - if (result.isDefined) { - val b = result.get - for { - r <- regions - } { - r match { - case stackRegion: StackRegion => - //println(s"StackRegion: ${stackRegion.start}") - //println(s"BitVecLiteral: ${b}") - //if (b.size == stackRegion.start.size) { TODO: Double check why this is needed - val nextOffset = bitVectorOpToBigIntOp(binExpr.op, stackRegion.start, b.value) - reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, BitVecLiteral(nextOffset, 64)), n) - //} - case dataRegion: DataRegion => - //val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start, globalOffsets), b) - val nextOffset = bitVectorOpToBigIntOp(binExpr.op, dataRegion.start, b.value) - reducedRegions ++= exprToRegion(BitVecLiteral(nextOffset, 64), n) - case _ => - } } + toReturn.toSet + case _: BitVecLiteral => + Set.empty[MemoryRegion] + case _ => + //println(s"Unknown expression: ${i}") + //println(ctx) + exprToRegion(i.rhs, i) + } + val result = evaluateExpression(binExpr.arg2, constantProp(n)) + if (result.isDefined) { + val b = result.get + regions.foreach { + case stackRegion: StackRegion => + //println(s"StackRegion: ${stackRegion.start}") + //println(s"BitVecLiteral: ${b}") + //if (b.size == stackRegion.start.size) { TODO: Double check why this is needed + val nextOffset = bitVectorOpToBigIntOp(binExpr.op, stackRegion.start, b.value) + reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, BitVecLiteral(nextOffset, 64)), n) + //} + case dataRegion: DataRegion => + //val 
nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start, globalOffsets), b) + val nextOffset = bitVectorOpToBigIntOp(binExpr.op, dataRegion.start, b.value) + reducedRegions ++= exprToRegion(BitVecLiteral(nextOffset, 64), n) + case _ => } } } @@ -181,7 +266,7 @@ class RegionInjector(domain: mutable.Set[CFGPosition], for (i <- ctx) { i.rhs match { case be: BinaryExpr => - res = res ++ exprToRegion(eval(i.rhs, i), n) + res = res ++ exprToRegion(i.rhs, n) case _ => } } @@ -215,88 +300,68 @@ class RegionInjector(domain: mutable.Set[CFGPosition], res } - /** Default implementation of eval. - */ - def eval(expr: Expr, cmd: Command): Expr = { - expr match - case literal: Literal => literal // ignore literals - case Extract(end, start, body) => - Extract(end, start, eval(body, cmd)) - case UninterpretedFunction(name, params, returnType) => - val newParams = params.map { p => eval(p, cmd) } - UninterpretedFunction(name, newParams, returnType) - case Repeat(repeats, body) => - Repeat(repeats, eval(body, cmd)) - case ZeroExtend(extension, body) => - ZeroExtend(extension, eval(body, cmd)) - case SignExtend(extension, body) => - SignExtend(extension, eval(body, cmd)) - case UnaryExpr(op, arg) => - UnaryExpr(op, eval(arg, cmd)) - case BinaryExpr(op, arg1, arg2) => - BinaryExpr(op, eval(arg1, cmd), eval(arg2, cmd)) - case MemoryLoad(mem, index, endian, size) => - // TODO: index should be replaced region - MemoryLoad(renameMemory(mem, index, cmd), eval(index, cmd), endian, size) - case variable: Variable => variable // ignore variables - } - def renameMemory(mem: Memory, expr: Expr, cmd : Command): Memory = { - val regions = exprToRegion(eval(expr, cmd), cmd) - if (regions.size == 1) { - Logger.debug(s"Mem CMD is: ${cmd}") - Logger.debug(s"Region found for mem: ${regions.head}") - regions.head match { - case stackRegion: StackRegion => - return StackMemory(stackRegion.regionIdentifier, mem.addressSize, mem.valueSize) - case dataRegion: DataRegion => - return SharedMemory(dataRegion.regionIdentifier, mem.addressSize, mem.valueSize) - case _ => - } - } else if (regions.size > 1) { - throw RuntimeException("Multiple regions found for memory") -// mmm.mergeRegions(regions) match { -// case stackRegion: StackRegion => -// return StackMemory(stackRegion.regionIdentifier, mem.addressSize, mem.valueSize) -// case dataRegion: DataRegion => -// return SharedMemory(dataRegion.regionIdentifier, mem.addressSize, mem.valueSize) -// case _ => -// } - } else { - Logger.debug(s"Mem CMD is: ${cmd}") - Logger.debug(s"No region found for expr ${expr} regions size is ${regions.size}") + def visitExpr(expr: Expr, cmd: Statement): Unit = { + expr match { + case Extract(_, _, body) => + visitExpr(body, cmd) + case UninterpretedFunction(_, params, _) => + params.foreach { + p => visitExpr(p, cmd) + } + case Repeat(_, body) => + visitExpr(body, cmd) + case ZeroExtend(_, body) => + visitExpr(body, cmd) + case SignExtend(_, body) => + visitExpr(body, cmd) + case UnaryExpr(_, arg) => + visitExpr(arg, cmd) + case BinaryExpr(_, arg1, arg2) => + visitExpr(arg1, cmd) + visitExpr(arg2, cmd) + case m: MemoryLoad => + val regions = exprToRegion(m.index, cmd) + accessToRegion(cmd) = regions + loadToMemory(cmd) = m.mem + case _ => } - mem } - /** Transfer function for state lattice elements. 
- */ - def localTransfer(n: CFGPosition): Unit = n match { + def visitStatement(n: Statement): Unit = n match { case assign: Assign => - assign.rhs = eval(assign.rhs, assign) - case mAssign: MemoryAssign => - mAssign.mem = renameMemory(mAssign.mem, mAssign.index, mAssign) - mAssign.index = eval(mAssign.index, mAssign) - mAssign.value = eval(mAssign.value, mAssign) + visitExpr(assign.rhs, assign) + case m: MemoryAssign => + val regions = exprToRegion(m.index, m) + accessToRegion(m) = regions case assert: Assert => - assert.body = eval(assert.body, assert) + visitExpr(assert.body, assert) case assume: Assume => - assume.body = eval(assume.body, assume) + visitExpr(assume.body, assume) case _ => // ignore other kinds of nodes } - def transformMemorySections(memorySegment: ArrayBuffer[MemorySection]): ArrayBuffer[MemorySection] = { - val newArrayBuffer = ArrayBuffer.empty[MemorySection] - for (mem <- memorySegment) { - val regions = mmm.findDataObject(mem.address) - if (regions.size == 1) { - newArrayBuffer += MemorySection(regions.head.regionIdentifier, mem.address, mem.size, mem.bytes) - Logger.debug(s"Region ${regions.get.regionIdentifier} found for memory section ${mem.address}") - } else { - newArrayBuffer += mem - Logger.debug(s"No region found for memory section ${mem.address}") + // replace memory renamer with something that creates map from access to region + // then handle all the merging required + // then do the renaming + // then get regions per procedure, handle initial memory with those + + def transformMemorySections(program: Program): Unit = { + val dataRegions = mergedRegions.keys.collect { case d: DataRegion => d } + + for (region <- dataRegions) { + program.initialMemoryLookup(region.start) match { + case Some(section) => + val bytes = section.getBytes(region.start, region.size) + // should probably check that region is entirely contained within section but shouldn't happen in practice? 
+ val newSection = MemorySection(region.regionIdentifier, region.start, region.size, bytes, section.readOnly, Some(mergedRegions(region))) + program.usedMemory(region.start) = newSection + case None => } } - newArrayBuffer } +} + +class RegionRenamer(memory: Memory) extends Visitor { + override def visitMemory(node: Memory): Memory = memory } \ No newline at end of file diff --git a/src/main/scala/ir/Interpreter.scala b/src/main/scala/ir/Interpreter.scala index d3b48a549..53ef40c2d 100644 --- a/src/main/scala/ir/Interpreter.scala +++ b/src/main/scala/ir/Interpreter.scala @@ -322,9 +322,7 @@ class Interpreter() { def interpret(IRProgram: Program): mutable.Map[Variable, BitVecLiteral] = { // initialize memory array from IRProgram var currentAddress = BigInt(0) - IRProgram.initialMemory - .sortBy(_.address) - .foreach { im => + IRProgram.initialMemory.values.foreach { im => if (im.address + im.size > currentAddress) { val start = im.address.max(currentAddress) val data = if (im.address < currentAddress) im.bytes.slice((currentAddress - im.address).toInt, im.size) else im.bytes diff --git a/src/main/scala/ir/Program.scala b/src/main/scala/ir/Program.scala index d06e4b59c..49158d986 100644 --- a/src/main/scala/ir/Program.scala +++ b/src/main/scala/ir/Program.scala @@ -3,17 +3,18 @@ package ir import scala.collection.mutable.ArrayBuffer import scala.collection.{IterableOnceExtensionMethods, View, immutable, mutable} import boogie.* -import analysis.BitVectorEval +import analysis.{BitVectorEval, MergedRegion} import util.intrusive_list.* import translating.serialiseIL class Program(var procedures: ArrayBuffer[Procedure], var mainProcedure: Procedure, - var initialMemory: ArrayBuffer[MemorySection], - var readOnlyMemory: ArrayBuffer[MemorySection]) extends Iterable[CFGPosition] { + val initialMemory: mutable.TreeMap[BigInt, MemorySection]) extends Iterable[CFGPosition] { val threads: ArrayBuffer[ProgramThread] = ArrayBuffer() + val usedMemory = mutable.TreeMap[BigInt, MemorySection]() + override def toString(): String = { serialiseIL(this) } @@ -71,14 +72,12 @@ class Program(var procedures: ArrayBuffer[Procedure], * section in readOnlyMemory. It also takes the .rela.dyn entries taken from the readelf output and adds them to the * .rodata section, as they are the global offset table entries that we can assume are constant. 
*/ - def determineRelevantMemory(rela_dyn: Map[BigInt, BigInt]): Unit = { - val initialMemoryNew = ArrayBuffer[MemorySection]() - val rodata = initialMemory.collect { case s if s.name == ".rodata" => s } - readOnlyMemory.addAll(rodata) - - val data = initialMemory.collect { case s if s.name == ".data" => s } - initialMemoryNew.addAll(data) + def determineRelevantMemory(rela_dyn: Map[BigInt, BigInt]): Unit = { + val rodata = initialMemory.values.collect { case s if s.name == ".rodata" => s } + rodata.foreach { r => usedMemory.addOne(r.address, r) } + val data = initialMemory.values.collect { case s if s.name == ".data" => s } + data.foreach { d => usedMemory.addOne(d.address, d) } // assuming little endian, adding the rela_dyn offset/address pairs like this is crude but is simplest for now for ((offset, address) <- rela_dyn) { @@ -88,10 +87,9 @@ class Program(var procedures: ArrayBuffer[Procedure], val high = low + 8 BitVectorEval.boogie_extract(high, low, addressBV) } - readOnlyMemory.append(MemorySection(s".got_$offset", offset.intValue, 8, bytes)) + usedMemory.addOne(offset, MemorySection(s".got_$offset", offset, 8, bytes, true, None)) } - initialMemory = initialMemoryNew } /** @@ -127,6 +125,20 @@ class Program(var procedures: ArrayBuffer[Procedure], ILUnorderedIterator(this) } + private def memoryLookup(memory: mutable.TreeMap[BigInt, MemorySection], address: BigInt) = { + memory.maxBefore(address + 1) match { + case Some(_, section) => + if (section.address + section.size > address) { + Some(section) + } else { + None + } + case _ => None + } + } + + def initialMemoryLookup(address: BigInt): Option[MemorySection] = memoryLookup(initialMemory, address) + def nameToProcedure: Map[String, Procedure] = { procedures.view.map(p => p.name -> p).toMap } @@ -456,4 +468,17 @@ object Block { * @param size number of bytes * @param bytes sequence of bytes represented by BitVecLiterals of size 8 */ -case class MemorySection(name: String, address: BigInt, size: Int, bytes: Seq[BitVecLiteral]) +case class MemorySection(name: String, address: BigInt, size: Int, bytes: Seq[BitVecLiteral], readOnly: Boolean, region: Option[MergedRegion] = None) { + + def getBytes(addr: BigInt, num: Int): Seq[BitVecLiteral] = { + val startIndex = (addr - address).toInt + for (i <- 0 until num) yield { + val index = startIndex + i + if (index >= bytes.size || index < 0) { + throw Exception("www") + } + bytes(startIndex + i) + } + } + +} \ No newline at end of file diff --git a/src/main/scala/ir/dsl/DSL.scala b/src/main/scala/ir/dsl/DSL.scala index 3ebeefbc4..a0d87f5f9 100644 --- a/src/main/scala/ir/dsl/DSL.scala +++ b/src/main/scala/ir/dsl/DSL.scala @@ -1,6 +1,7 @@ package ir.dsl import ir.* import scala.collection.mutable +import scala.collection.mutable.ArrayBuffer import scala.collection.immutable.* val R0: Register = Register("R0", 64) @@ -146,9 +147,8 @@ def stack: SharedMemory = SharedMemory("stack", 64, 8) def prog(procedures: EventuallyProcedure*): Program = { require(procedures.nonEmpty) - val initialMemory = mutable.ArrayBuffer.empty[MemorySection] - val readOnlyMemory = mutable.ArrayBuffer.empty[MemorySection] - val p = Program(mutable.ArrayBuffer.from(procedures.map(_.tempProc)), procedures.map(_.tempProc).head, initialMemory, readOnlyMemory) + val initialMemory = mutable.TreeMap[BigInt, MemorySection]() + val p = Program(ArrayBuffer.from(procedures.map(_.tempProc)), procedures.map(_.tempProc).head, initialMemory) procedures.foreach(_.resolve(p)) p diff --git a/src/main/scala/translating/BAPToIR.scala 
b/src/main/scala/translating/BAPToIR.scala index a4b3aba19..908978046 100644 --- a/src/main/scala/translating/BAPToIR.scala +++ b/src/main/scala/translating/BAPToIR.scala @@ -8,6 +8,7 @@ import specification.* import scala.collection.mutable import scala.collection.mutable.Map import scala.collection.mutable.ArrayBuffer +import scala.collection.mutable.TreeMap import util.intrusive_list.* class BAPToIR(var program: BAPProgram, mainAddress: BigInt) { @@ -62,17 +63,18 @@ class BAPToIR(var program: BAPProgram, mainAddress: BigInt) { } - val memorySections: ArrayBuffer[MemorySection] = ArrayBuffer() + val memorySections: mutable.TreeMap[BigInt, MemorySection] = mutable.TreeMap() for (m <- program.memorySections) { val bytes = if (m.name == ".bss" && m.bytes.isEmpty) { for (_ <- 0 until m.size) yield BitVecLiteral(0, 8) } else { m.bytes.map(_.toIR) } - memorySections.append(MemorySection(m.name, m.address, m.size, bytes)) + val readOnly = m.name == ".rodata" || m.name == ".got" // crude heuristic + memorySections.addOne(m.address, MemorySection(m.name, m.address, m.size, bytes, readOnly, None)) } - Program(procedures, mainProcedure.get, memorySections, ArrayBuffer()) + Program(procedures, mainProcedure.get, memorySections) } private def translate(s: BAPStatement) = s match { diff --git a/src/main/scala/translating/GTIRBToIR.scala b/src/main/scala/translating/GTIRBToIR.scala index fab10dc3b..bf8a26ad9 100644 --- a/src/main/scala/translating/GTIRBToIR.scala +++ b/src/main/scala/translating/GTIRBToIR.scala @@ -200,8 +200,8 @@ class GTIRBToIR(mods: Seq[Module], parserMap: immutable.Map[String, Array[Array[ val sections = mods.flatMap(_.sections) - val initialMemory: ArrayBuffer[MemorySection] = ArrayBuffer() - sections.map {elem => + val initialMemory: mutable.TreeMap[BigInt, MemorySection] = mutable.TreeMap() + sections.map { elem => val bytesToInt = elem.byteIntervals.head.contents.toByteArray.map(byte => BigInt(byte)) val size = elem.byteIntervals.head.size.toInt val bytes = if (elem.name == ".bss" && bytesToInt.isEmpty) { @@ -215,14 +215,15 @@ class GTIRBToIR(mods: Seq[Module], parserMap: immutable.Map[String, Array[Array[ } }.toSeq } - val section = MemorySection(elem.name, BigInt(elem.byteIntervals.head.address), size, bytes) - initialMemory += section + val readOnly = elem.name == ".rodata" || elem.name == ".got" // crude heuristic for now + val address = BigInt(elem.byteIntervals.head.address) + val section = MemorySection(elem.name, address, size, bytes, readOnly, None) + initialMemory += (address -> section) } - val readOnlyMemory: ArrayBuffer[MemorySection] = ArrayBuffer() val intialProc: Procedure = procedures.find(_.address.get == mainAddress).get - Program(procedures, intialProc, initialMemory, readOnlyMemory) + Program(procedures, intialProc, initialMemory) } private def removePCAssign(block: Block): Option[String] = { diff --git a/src/main/scala/translating/IRToBoogie.scala b/src/main/scala/translating/IRToBoogie.scala index 48c4eb9d2..f63c71ad5 100644 --- a/src/main/scala/translating/IRToBoogie.scala +++ b/src/main/scala/translating/IRToBoogie.scala @@ -4,6 +4,7 @@ import boogie.* import specification.* import util.{BoogieGeneratorConfig, BoogieMemoryAccessMode, ProcRelyVersion} +import scala.collection.mutable import scala.collection.mutable.ArrayBuffer class IRToBoogie(var program: Program, var spec: Specification, var thread: Option[ProgramThread], val filename: String) { @@ -52,14 +53,17 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti def 
translate(boogieGeneratorConfig: BoogieGeneratorConfig): BProgram = { config = boogieGeneratorConfig - val readOnlyMemory = memoryToCondition(program.readOnlyMemory) + val readOnlySections = program.usedMemory.values.filter(_.readOnly) + val readOnlyMemory = memoryToCondition(readOnlySections) + val initialSections = program.usedMemory.values.filter(!_.readOnly) + val initialMemory = memoryToCondition(initialSections) val procedures = thread match { case None => - program.procedures.map(f => translateProcedure(f, readOnlyMemory)) + program.procedures.map(f => translateProcedure(f, readOnlyMemory, initialMemory)) case Some(t) => val translatedProcedures = ArrayBuffer[BProcedure]() - t.procedures.foreach(p => translatedProcedures.addOne(translateProcedure(p, readOnlyMemory))) + t.procedures.foreach(p => translatedProcedures.addOne(translateProcedure(p, readOnlyMemory, initialMemory))) translatedProcedures } val defaultGlobals = List(BVarDecl(mem, List(externAttr)), BVarDecl(Gamma_mem, List(externAttr))) @@ -432,7 +436,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti } - def translateProcedure(p: Procedure, readOnlyMemory: List[BExpr]): BProcedure = { + def translateProcedure(p: Procedure, readOnlyMemory: List[BExpr], initialMemory: List[BExpr]): BProcedure = { val body = (p.entryBlock.view ++ p.blocks.filterNot(x => p.entryBlock.contains(x))).map(translateBlock).toList val callsRely: Boolean = body.flatMap(_.body).exists(_ match @@ -455,7 +459,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti val procEnsuresDirect: List[String] = ensuresDirect.getOrElse(p.name, List()) val freeRequires: List[BExpr] = if (p == program.mainProcedure) { - memoryToCondition(program.initialMemory) ++ readOnlyMemory + initialMemory ++ readOnlyMemory } else { readOnlyMemory } @@ -477,10 +481,10 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti ) } - private def memoryToCondition(memory: ArrayBuffer[MemorySection]): List[BExpr] = { + private def memoryToCondition(memorySections: Iterable[MemorySection]): List[BExpr] = { def coalesced: List[BExpr] = { - val sections = memory.flatMap { s => + val sections = memorySections.flatMap { s => // Phrase the memory condition in terms of 64-bit operations, as long as the memory // section's size is a multiple of 64-bits and 64-bits (8 bytes) aligned // If the memory section is not aligned, the initial unaligned part of it will not be coalesced into a 64-bit @@ -488,6 +492,10 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti // If the memory section's size is not a multiple of 64-bits, the last part of it that cannot be coalesced into // a 64-bit representation will remain as an 8-bit representation + val memory = s.region match { + case Some(region) => BMapVar(region.name, MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global) + case None => mem + } val aligned: Int = (s.address % 8).toInt val alignedSizeMultiple = (s.bytes.size - aligned) % 8 @@ -501,7 +509,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti (0 until 8).foldLeft(BigInt(0))((x, y) => x + (s.bytes(b + y).value * BigInt(2).pow(y * 8))) BinaryBExpr( BVEQ, - BMemoryLoad(mem, BitVecBLiteral(s.address + b, 64), Endian.LittleEndian, 64), + BMemoryLoad(memory, BitVecBLiteral(s.address + b, 64), Endian.LittleEndian, 64), BitVecBLiteral(combined, 64) ) } @@ -514,7 +522,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var 
thread: Opti for (b <- 0 until aligned) yield { BinaryBExpr( BVEQ, - BMemoryLoad(mem, BitVecBLiteral(s.address + b, 64), Endian.LittleEndian, 8), + BMemoryLoad(memory, BitVecBLiteral(s.address + b, 64), Endian.LittleEndian, 8), s.bytes(b).toBoogie ) } @@ -532,7 +540,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti for (b <- alignedEnd until s.bytes.size) yield { BinaryBExpr( BVEQ, - BMemoryLoad(mem, BitVecBLiteral(s.address + b, 64), Endian.LittleEndian, 8), + BMemoryLoad(memory, BitVecBLiteral(s.address + b, 64), Endian.LittleEndian, 8), s.bytes(b).toBoogie ) } @@ -543,11 +551,15 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti } def bytes: List[BExpr] = { - val sections = memory.flatMap { s => + val sections = memorySections.flatMap { s => + val memory = s.region match { + case Some(region) => BMapVar(region.name, MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global) + case None => mem + } for (b <- s.bytes.indices) yield { BinaryBExpr( BVEQ, - BMemoryLoad(mem, BitVecBLiteral(s.address + b, 64), Endian.LittleEndian, 8), + BMemoryLoad(memory, BitVecBLiteral(s.address + b, 64), Endian.LittleEndian, 8), s.bytes(b).toBoogie ) } diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 185018cdc..64c652385 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -215,7 +215,9 @@ object IRTransform { * add in modifies from the spec. */ def prepareForTranslation(config: BASILConfig, ctx: IRContext): Unit = { - ctx.program.determineRelevantMemory(ctx.globalOffsets) + if (config.staticAnalysis.isEmpty) { + ctx.program.determineRelevantMemory(ctx.globalOffsets) + } Logger.debug("[!] Stripping unreachable") val before = ctx.program.procedures.size @@ -315,8 +317,6 @@ object StaticAnalysis { val mergedSubroutines = subroutines ++ externalAddresses - val domain = computeDomain(IntraProcIRCursor, IRProgram.procedures) - Logger.debug("[!] Running ANR") val ANRSolver = ANRAnalysisSolver(IRProgram) val ANRResult = ANRSolver.analyze() @@ -409,7 +409,7 @@ object StaticAnalysis { mmm.logRegions(memoryRegionContents) Logger.debug("[!] Injecting regions") - val regionInjector = RegionInjector(domain, IRProgram, constPropResult, mmm, reachingDefinitionsAnalysisResults, globalOffsets) + val regionInjector = RegionInjector(IRProgram, constPropResult, mmm, reachingDefinitionsAnalysisResults) regionInjector.nodeVisitor() Logger.debug("[!] 
Running VSA") @@ -511,7 +511,9 @@ object RunUtils { IRTransform.doCleanup(ctx) q.loading.dumpIL.foreach(s => writeToFile(serialiseIL(ctx.program), s"$s-before-analysis.il")) - val analysis = q.staticAnalysis.map(conf => staticAnalysis(conf, ctx)) + val analysis = q.staticAnalysis.map { + conf => staticAnalysis(conf, ctx) + } q.loading.dumpIL.foreach(s => writeToFile(serialiseIL(ctx.program), s"$s-after-analysis.il")) if (q.runInterpret) { From 846b020c4747aca253c4eca12f4b7b3ad306f88d Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Mon, 30 Sep 2024 10:39:05 +1000 Subject: [PATCH 063/104] Changes in MRA and made Reaching def interproc --- .../one_function_multi_call.c | 23 +++++ src/main/scala/analysis/Analysis.scala | 17 ++-- .../InterprocSteensgaardAnalysis.scala | 78 +++++++++------- src/main/scala/analysis/MemoryModelMap.scala | 36 +++++--- .../scala/analysis/MemoryRegionAnalysis.scala | 90 ++++++++++++++++--- .../ReachingDefinitionsAnalysis.scala | 7 +- src/main/scala/analysis/RegionInjector.scala | 2 +- src/main/scala/ir/IRCursor.scala | 31 ++++--- .../transforms/IndirectCallResolution.scala | 2 +- src/main/scala/util/RunUtils.scala | 34 +++---- 10 files changed, 220 insertions(+), 100 deletions(-) create mode 100644 examples/one_function_multi_call/one_function_multi_call.c diff --git a/examples/one_function_multi_call/one_function_multi_call.c b/examples/one_function_multi_call/one_function_multi_call.c new file mode 100644 index 000000000..4aab3dd14 --- /dev/null +++ b/examples/one_function_multi_call/one_function_multi_call.c @@ -0,0 +1,23 @@ +#include + +// Function declarations +int addNumbers(int a, int b); + +int callAddFromAnotherFunction(int x, int y) { + return addNumbers(x, y); +} + +int callFromFun2(int x, int y) { + return addNumbers(x, y); +} + +int addNumbers(int a, int b) { + return a + b; +} + +int main() { + int resultFromMain = addNumbers(10, 5); + int resultFromOtherFunc = callAddFromAnotherFunction(20, 15); + int resultFromFun2 = callFromFun2(30, 25); + return 0; +} diff --git a/src/main/scala/analysis/Analysis.scala b/src/main/scala/analysis/Analysis.scala index 9ab736160..5a8e6ff9a 100644 --- a/src/main/scala/analysis/Analysis.scala +++ b/src/main/scala/analysis/Analysis.scala @@ -22,7 +22,7 @@ trait Analysis[+R]: /** Base class for value analysis with simple (non-lifted) lattice. */ -trait ConstantPropagation(val program: Program) { +trait ConstantPropagation(val program: Program, val assumeR31: Boolean) { /** The lattice of abstract states. */ @@ -76,15 +76,19 @@ trait ConstantPropagation(val program: Program) { /** Transfer function for state lattice elements. */ def localTransfer(n: CFGPosition, s: Map[Variable, FlatElement[BitVecLiteral]]): Map[Variable, FlatElement[BitVecLiteral]] = + var m = s n match case r: Command => + if (assumeR31 && IRWalk.procedure(n).entryBlock.isDefined && IRWalk.firstInBlock(program.mainProcedure.entryBlock.get) == n) { + m = m + (Register("R31", 64) -> eval(BitVecLiteral(Long.MaxValue, 64), m)) + } r match // assignments case la: Assign => - s + (la.lhs -> eval(la.rhs, s)) + m + (la.lhs -> eval(la.rhs, m)) // all others: like no-ops - case _ => s - case _ => s + case _ => m + case _ => m /** The analysis lattice. 
*/ @@ -97,11 +101,12 @@ trait ConstantPropagation(val program: Program) { def transfer(n: CFGPosition, s: Map[Variable, FlatElement[BitVecLiteral]]): Map[Variable, FlatElement[BitVecLiteral]] = localTransfer(n, s) } -class ConstantPropagationSolver(program: Program) extends ConstantPropagation(program) +class ConstantPropagationSolver(program: Program, assumeR31: Boolean = false) extends ConstantPropagation(program, assumeR31) with SimplePushDownWorklistFixpointSolver[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]], MapLattice[Variable, FlatElement[BitVecLiteral], ConstantPropagationLattice]] - with IRIntraproceduralForwardDependencies + with IRInterproceduralForwardDependencies with Analysis[Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]]] + /** Base class for value analysis with simple (non-lifted) lattice. */ trait ConstantPropagationWithSSA(val program: Program, val reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]) { diff --git a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala index 82e8dc8b8..c3431a29c 100644 --- a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala +++ b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala @@ -34,12 +34,11 @@ case class RegisterWrapperEqualSets(variable: Variable, assigns: Set[Assign]) { } /** Steensgaard-style pointer analysis. The analysis associates an [[StTerm]] with each variable declaration and - * expression node in the AST. It is implemented using [[analysis.solvers.UnionFindSolver]]. - */ + * expression node in the AST. It is implemented using [[analysis.solvers.UnionFindSolver]]. + */ class InterprocSteensgaardAnalysis( program: Program, constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - regionAccesses: Map[CFGPosition, Map[RegisterVariableWrapper, FlatElement[Expr]]], mmm: MemoryModelMap, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], globalOffsets: Map[BigInt, BigInt]) extends Analysis[Any] { @@ -61,11 +60,6 @@ class InterprocSteensgaardAnalysis( def getMemoryRegionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]] = memoryRegionContents.map((k, v) => k -> v.toSet).toMap - private def nextMallocCount() = { - mallocCount += 1 - s"malloc_$mallocCount" - } - /** * In expressions that have accesses within a region, we need to relocate * the base address to the actual address using the relocation table. 
@@ -157,12 +151,12 @@ class InterprocSteensgaardAnalysis( r match { case stackRegion: StackRegion => val nextOffset = bitVectorOpToBigIntOp(binExpr.op, stackRegion.start, b.value) - reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, BitVecLiteral(nextOffset, 64)), n) + reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, BitVecLiteral(nextOffset, 64)), i) case dataRegion: DataRegion => val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start), b) val b2 = evaluateExpression(nextOffset, constantProp(n)) if (b2.isDefined) { - reducedRegions ++= exprToRegion(b2.get, n) + reducedRegions ++= exprToRegion(b2.get, i) } case _ => } @@ -249,8 +243,17 @@ class InterprocSteensgaardAnalysis( } } + // def exprToRegion(expr: Expr, cmd: Command): Option[MemoryRegion] = { + // val isGlobal = evaluateExpression(expr, constantProp(cmd)) + // if (isGlobal.isDefined) { + // mmm.findDataObject(isGlobal.get.value) + // } else { + // mmm.getStack((cmd, expr)) + // } + // } + /** @inheritdoc - */ + */ def analyze(): Unit = // generate the constraints by traversing the AST and solve them on-the-fly program.procedures.foreach(p => { @@ -258,11 +261,11 @@ class InterprocSteensgaardAnalysis( }) /** Generates the constraints for the given sub-AST. - * @param node - * the node for which it generates the constraints - * @param arg - * unused for this visitor - */ + * @param node + * the node for which it generates the constraints + * @param arg + * unused for this visitor + */ def visit(node: CFGPosition, arg: Unit): Unit = { if (visited.contains(node)) { return @@ -284,6 +287,11 @@ class InterprocSteensgaardAnalysis( exprToRegion(binOp, cmd).foreach( x => unify(IdentifierVariable(RegisterVariableWrapper(assign.lhs, getDefinition(assign.lhs, cmd, reachingDefs))), PointerRef(AllocVariable(x))) ) + case variable: Variable => + // X1 = X2: [[X1]] = [[X2]] + val X1 = assign.lhs + val X2 = variable + unify(IdentifierVariable(RegisterVariableWrapper(X1, getDefinition(X1, cmd, reachingDefs))), IdentifierVariable(RegisterVariableWrapper(X2, getUse(X2, cmd, reachingDefs)))) // TODO: should lookout for global base + offset case as well case _ => unwrapExpr(assign.rhs).foreach { @@ -309,17 +317,25 @@ class InterprocSteensgaardAnalysis( $X2.foreach( x => unify(IdentifierVariable(RegisterVariableWrapper(assign.lhs, getDefinition(assign.lhs, cmd, reachingDefs))), PointerRef(AllocVariable(x))) ) - case variable: Variable => - // X1 = X2: [[X1]] = [[X2]] - val X1 = assign.lhs - val X2 = variable - unify(IdentifierVariable(RegisterVariableWrapper(X1, getDefinition(X1, cmd, reachingDefs))), IdentifierVariable(RegisterVariableWrapper(X2, getUse(X2, cmd, reachingDefs)))) case _ => // do nothing } } case memoryAssign: MemoryAssign => // *X1 = X2: [[X1]] = ↑a ^ [[X2]] = a where a is a fresh term variable - val X1_star = exprToRegion(memoryAssign.index, cmd) + val X1_star1 = exprToRegion(memoryAssign.index, cmd) + val X1_star = X1_star1.foldLeft(Set[MemoryRegion]()) { + case (acc, x) => + if (!memoryRegionContents.contains(x)) { + memoryRegionContents.addOne(x -> mutable.Set()) + } + val found = memoryRegionContents(x).filter(r => r.isInstanceOf[MemoryRegion]).map(r => r.asInstanceOf[MemoryRegion]) + if (found.nonEmpty) { + // get just the memory regions from the region contents + acc ++ found + } else { + acc + x + } + } val X2 = evaluateExpression(memoryAssign.value, constantProp(cmd)) // TODO: This is risky as it tries to coerce every value to a region (needed for functionpointer example) val 
possibleRegions = exprToRegion(memoryAssign.value, cmd) @@ -357,7 +373,7 @@ class InterprocSteensgaardAnalysis( } /** @inheritdoc - */ + */ def pointsTo(): Map[RegisterVariableWrapper, Set[RegisterVariableWrapper | MemoryRegion]] = { val solution = solver.solution() val unifications = solver.unifications() @@ -378,7 +394,7 @@ class InterprocSteensgaardAnalysis( } /** @inheritdoc - */ + */ def mayAlias(): (RegisterVariableWrapper, RegisterVariableWrapper) => Boolean = { val solution = solver.solution() (id1: RegisterVariableWrapper, id2: RegisterVariableWrapper) => @@ -389,32 +405,32 @@ class InterprocSteensgaardAnalysis( } /** Terms used in unification. - */ + */ sealed trait StTerm /** A term variable that represents an alloc in the program. - */ + */ case class AllocVariable(alloc: MemoryRegion) extends StTerm with Var[StTerm] { override def toString: String = s"alloc{${alloc}}" } /** A term variable that represents an identifier in the program. - */ + */ case class IdentifierVariable(id: RegisterVariableWrapper) extends StTerm with Var[StTerm] { override def toString: String = s"$id" } /** A term variable that represents an expression in the program. - */ + */ case class ExpressionVariable(expr: MemoryRegion | Expr) extends StTerm with Var[StTerm] { override def toString: String = s"$expr" } /** A fresh term variable. - */ + */ case class FreshVariable(var id: Int = 0) extends StTerm with Var[StTerm] { id = Fresh.next() @@ -423,7 +439,7 @@ case class FreshVariable(var id: Int = 0) extends StTerm with Var[StTerm] { } /** A constructor term that represents a pointer to another term. - */ + */ case class PointerRef(of: Term[StTerm]) extends StTerm with Cons[StTerm] { val args: List[Term[StTerm]] = List(of) @@ -434,7 +450,7 @@ case class PointerRef(of: Term[StTerm]) extends StTerm with Cons[StTerm] { } /** Counter for producing fresh IDs. 
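 * As a sketch of how fresh terms are used by the store rule noted earlier
 * (*X1 = X2 gives [[X1]] = ↑a and [[X2]] = a): termOf below is a hypothetical helper that
 * maps an expression to its StTerm; unify, FreshVariable and PointerRef are as defined here.
 * {{{
 *   val a = FreshVariable()
 *   unify(termOf(x1), PointerRef(a)) // [[X1]] = ↑a
 *   unify(termOf(x2), a)             // [[X2]] = a
 * }}}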
- */ + */ object Fresh { var n = 0 diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index 7a7b9ea1c..b4cd9401b 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -22,7 +22,7 @@ class MemoryModelMap { private val MAX_BIGINT: BigInt = BigInt(Long.MaxValue) private val contextStack = mutable.Stack.empty[String] private val sharedContextStack = mutable.Stack.empty[List[StackRegion]] - private val localStacks = mutable.Map[String, List[StackRegion]]() + private val localStacks = mutable.Map[String, List[StackRegion]]().withDefaultValue(List.empty) private val sharedStacks = mutable.Map[String, List[StackRegion]]() private val stackMap: mutable.Map[RangeKey, StackRegion] = mutable.TreeMap() @@ -33,6 +33,8 @@ class MemoryModelMap { private val dataMap: mutable.Map[RangeKey, DataRegion] = mutable.TreeMap() private val heapCalls: mutable.Map[DirectCall, HeapRegion] = mutable.Map() + private val stackAllocationSites: mutable.Map[(CFGPosition, Expr), StackRegion] = mutable.Map() + private val uf = new UnionFind() /** Add a range and object to the mapping @@ -76,7 +78,7 @@ class MemoryModelMap { if (offset <= currentMaxRange.end) { currentStackMap.remove(currentMaxRange) currentMaxRegion.fields += offset - val updatedRange = RangeKey(currentMaxRange.start, offset + maxSize(region) - 1) + val updatedRange = RangeKey(currentMaxRange.start, (maxSize(region) - 1).max(currentMaxRange.end)) currentStackMap.addOne(updatedRange -> currentMaxRegion) for (elem <- region.fields) { currentMaxRegion.fields += offset + elem @@ -137,13 +139,22 @@ class MemoryModelMap { tableAddress } - def convertMemoryRegions(stackRegionsPerProcedure: mutable.Map[Procedure, mutable.Set[StackRegion]], heapRegions: mutable.Map[DirectCall, HeapRegion], mergeRegions: mutable.Set[Set[MemoryRegion]], externalFunctions: Map[BigInt, String], globalOffsets: Map[BigInt, BigInt], globalAddresses: Map[BigInt, String], globalSizes: Map[String, Int], procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]]): Unit = { + def preLoadGlobals(externalFunctions: Map[BigInt, String], globalOffsets: Map[BigInt, BigInt], globalAddresses: Map[BigInt, String], globalSizes: Map[String, Int]): Unit = { // map externalFunctions name, value to DataRegion(name, value) and then sort by value val reversedExternalFunctionRgns = externalFunctions.map((offset, name) => resolveInverseGlobalOffset(name, offset, globalOffsets) -> name) val filteredGlobalOffsets = globalAddresses.filterNot((offset, name) => reversedExternalFunctionRgns.contains(offset)) - val externalFunctionRgns = (reversedExternalFunctionRgns ++ filteredGlobalOffsets).map((offset, name) => DataRegion(name, offset, (globalSizes.getOrElse(name, 1).toDouble/8).ceil.toInt)) + val externalFunctionRgns = (reversedExternalFunctionRgns ++ filteredGlobalOffsets).map((offset, name) => DataRegion(name, offset, (globalSizes.getOrElse(name, 1).toDouble / 8).ceil.toInt)) + + // add externalFunctionRgn to dataRgns and sort by value + val allDataRgns = externalFunctionRgns.toList.sortBy(_.start) + for (dataRgn <- allDataRgns) { + add(dataRgn.start, dataRgn) + } + } + def convertMemoryRegions(stackRegionsPerProcedure: mutable.Map[Procedure, mutable.Set[StackRegion]], heapRegions: mutable.Map[DirectCall, HeapRegion], mergeRegions: mutable.Set[Set[MemoryRegion]], allocationSites: mutable.Map[(CFGPosition, Expr), StackRegion], procedureToSharedRegions: mutable.Map[Procedure, 
mutable.Set[MemoryRegion]]): Unit = { + stackAllocationSites ++= allocationSites stackRegionsPerProcedure.keys.foreach(exitNode => if (procedureToSharedRegions.contains(exitNode)) { val sharedRegions = procedureToSharedRegions(exitNode) @@ -153,11 +164,6 @@ class MemoryModelMap { val stackRgns = stackRegionsPerProcedure(exitNode).toList.sortBy(_.start) localStacks(exitNode.name) = stackRgns ) - // add externalFunctionRgn to dataRgns and sort by value - val allDataRgns = externalFunctionRgns.toList.sortBy(_.start) - for (dataRgn <- allDataRgns) { - add(dataRgn.start, dataRgn) - } heapCalls ++= heapRegions // add heap regions @@ -393,7 +399,7 @@ class MemoryModelMap { logRegion(range, region, true) } } - Logger.debug("Stack Root:") + Logger.debug("Stack Union-Find Roots:") for name <- localStacks.keys do popContext() pushContext(name) @@ -407,6 +413,11 @@ class MemoryModelMap { parentCount += 1 } if parentCount == 0 then Logger.debug(" No root regions") else Logger.debug(s" Parents: $parentCount/${stackMap.size}") + Logger.debug("Shared Stacks:") + for (name, sharedStacks) <- sharedStacks do + Logger.debug(s" Function: $name") + for region <- sharedStacks do + Logger.debug(s" $region") Logger.debug("Heap:") for ((range, region) <- heapMap) { logRegion(range, region) @@ -444,6 +455,11 @@ class MemoryModelMap { require(directCall.target.name == "malloc", "Should be a malloc call") heapCalls(directCall) } + + def getStack(allocationSite: (CFGPosition, Expr)): Option[StackRegion] = { + val stackRegion = stackAllocationSites.get(allocationSite) + if stackRegion.isDefined then Some(returnRegion(stackAllocationSites(allocationSite))) else None + } } trait MemoryRegion { diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index 3653f5cc4..80e38ef4e 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -15,8 +15,9 @@ trait MemoryRegionAnalysis(val program: Program, val constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], val ANRResult: Map[CFGPosition, Set[Variable]], val RNAResult: Map[CFGPosition, Set[Variable]], - val regionAccesses: Map[CFGPosition, Map[RegisterVariableWrapper, FlatElement[Expr]]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]) { + val reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], + val offSetApproximation: Boolean = false, + val mmm: MemoryModelMap) { var mallocCount: Int = 0 private var stackCount: Int = 0 @@ -99,6 +100,7 @@ trait MemoryRegionAnalysis(val program: Program, var procedureToHeapRegions: mutable.Map[DirectCall, HeapRegion] = mutable.Map() var memLoadToRegion: mutable.Map[MemoryLoad, MemoryRegion] = mutable.Map() var mergeRegions: mutable.Set[Set[MemoryRegion]] = mutable.Set() + var allocationSites: mutable.Map[(CFGPosition, Expr), StackRegion] = mutable.Map() def addMergableRegions(regions: Set[MemoryRegion]): Unit = { mergeRegions.add(regions) @@ -116,6 +118,10 @@ trait MemoryRegionAnalysis(val program: Program, memLoadToRegion.put(memoryLoad, memoryRegion) } + def addAllocationSite(memory: (CFGPosition, Expr), stackRegion: StackRegion): Unit = { + allocationSites.put(memory, stackRegion) + } + def reducibleToRegion(binExpr: BinaryExpr, n: Command, subAccess: BigInt): Set[MemoryRegion] = { var reducedRegions = Set.empty[MemoryRegion] binExpr.arg1 match { @@ -142,7 +148,7 @@ trait MemoryRegionAnalysis(val program: Program, 
} } case _ => - eval(binExpr, Set.empty, n, subAccess) + reducedRegions = reducedRegions ++ eval(binExpr, Set.empty, n, subAccess) } reducedRegions } @@ -240,23 +246,82 @@ trait MemoryRegionAnalysis(val program: Program, s } case memAssign: MemoryAssign => + if (evaluateExpression(memAssign.index, constantProp(n)).isDefined) { + return s // skip global memory regions + } val result = eval(memAssign.index, s, cmd, memAssign.size) if (result.size > 1) { //throw new Exception(s"Memory load resulted in multiple regions ${result} for mem load $memoryLoad") addMergableRegions(result) } + if (result.nonEmpty) { + addAllocationSite((cmd, memAssign.index), result.head.asInstanceOf[StackRegion]) + } regionLattice.lub(s, result) case assign: Assign => stackDetection(assign) var m = s unwrapExpr(assign.rhs).foreach { case memoryLoad: MemoryLoad => - val result = eval(memoryLoad.index, s, cmd, memoryLoad.size) - if (result.size > 1) { - //throw new Exception(s"Memory load resulted in multiple regions ${result} for mem load $memoryLoad") - addMergableRegions(result) + if (evaluateExpression(memoryLoad.index, constantProp(n)).isEmpty) { // skip global memory regions + val result = eval(memoryLoad.index, s, cmd, memoryLoad.size) + if (result.size > 1) { + //throw new Exception(s"Memory load resulted in multiple regions ${result} for mem load $memoryLoad") + addMergableRegions(result) + } + if (result.nonEmpty) { + addAllocationSite((cmd, memoryLoad.index), result.head.asInstanceOf[StackRegion]) + } + m = regionLattice.lub(m, result) + } + case _ => m + } + m + case _ => s + } + case _ => s // ignore other kinds of nodes + } + + def localTransfer2(n: CFGPosition, s: Set[MemoryRegion]): Set[MemoryRegion] = n match { + case cmd: Command => + cmd match { + case directCall: DirectCall => + if (directCall.target.name == "malloc") { + evaluateExpression(mallocVariable, constantProp(n)) match { + case Some(b: BitVecLiteral) => + val negB = if isNegative(b) then b.value - BigInt(2).pow(b.size) else b.value + val newHeapRegion = HeapRegion(nextMallocCount(), negB, IRWalk.procedure(n)) + addReturnHeap(directCall, newHeapRegion) + regionLattice.lub(s, Set(newHeapRegion)) + case None => s + } + } else { + s + } + case memAssign: MemoryAssign => + val evaluation = evaluateExpression(memAssign.index, constantProp(n)) + if (evaluation.isDefined) { + val isGlobal = mmm.findDataObject(evaluation.get.value) + if (isGlobal.isEmpty) { + val result = poolMaster(Long.MaxValue - evaluation.get.value, IRWalk.procedure(n), memAssign.size) + addAllocationSite((cmd, memAssign.index), result) + return regionLattice.lub(s, Set(result)) + } + } + s + case assign: Assign => + var m = s + unwrapExpr(assign.rhs).foreach { + case memoryLoad: MemoryLoad => + val evaluation = evaluateExpression(memoryLoad.index, constantProp(n)) + if (evaluation.isDefined) { + val isGlobal = mmm.findDataObject(evaluation.get.value) + if (isGlobal.isEmpty) { + val result = poolMaster(Long.MaxValue - evaluation.get.value, IRWalk.procedure(n), memoryLoad.size) + addAllocationSite((cmd, memoryLoad.index), result) + m = regionLattice.lub(s, Set(result)) + } } - m = regionLattice.lub(m, result) case _ => m } m @@ -265,7 +330,7 @@ trait MemoryRegionAnalysis(val program: Program, case _ => s // ignore other kinds of nodes } - def transferUnlifted(n: CFGPosition, s: Set[MemoryRegion]): Set[MemoryRegion] = localTransfer(n, s) + def transferUnlifted(n: CFGPosition, s: Set[MemoryRegion]): Set[MemoryRegion] = if offSetApproximation then localTransfer2(n, s) else 
localTransfer(n, s) } class MemoryRegionAnalysisSolver( @@ -276,9 +341,10 @@ class MemoryRegionAnalysisSolver( constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], ANRResult: Map[CFGPosition, Set[Variable]], RNAResult: Map[CFGPosition, Set[Variable]], - regionAccesses: Map[CFGPosition, Map[RegisterVariableWrapper, FlatElement[Expr]]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])] - ) extends MemoryRegionAnalysis(program, globals, globalOffsets, subroutines, constantProp, ANRResult, RNAResult, regionAccesses, reachingDefs) + reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], + offSetApproximation: Boolean, + mmm: MemoryModelMap + ) extends MemoryRegionAnalysis(program, globals, globalOffsets, subroutines, constantProp, ANRResult, RNAResult, reachingDefs, offSetApproximation, mmm) with IRIntraproceduralForwardDependencies with Analysis[Map[CFGPosition, LiftedElement[Set[MemoryRegion]]]] with WorklistFixpointSolverWithReachability[CFGPosition, Set[MemoryRegion], PowersetLattice[MemoryRegion]] { diff --git a/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala b/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala index 3bb8c01dd..818886d1e 100644 --- a/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala +++ b/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala @@ -3,7 +3,7 @@ package analysis import ir.* import analysis.solvers.SimpleWorklistFixpointSolver -case class ReachingDefinitionsAnalysis(program: Program) { +case class ReachingDefinitionsAnalysis(program: Program, inter: Boolean = false) { type Definition = Assign // local assign is a definition because it is a statement and statements are assumed to be unique type TupleElement = @@ -77,7 +77,8 @@ case class ReachingDefinitionsAnalysis(program: Program) { transformUses(assume.body.variables, s) case indirectCall: IndirectCall => transformUses(indirectCall.target.variables, s) - case directCall: DirectCall if directCall.target.name == "malloc" => + // if we do interproc analysis then there is no need to make any special assumptions about malloc + case directCall: DirectCall if directCall.target.name == "malloc" && !inter => // assume R0 has been assigned, generate a fake definition val mallocVar = Register("R0", 64) val mallocDef = generateUniqueDefinition(mallocVar) @@ -96,6 +97,6 @@ class ReachingDefinitionsAnalysisSolver(program: Program) with IRIntraproceduralForwardDependencies class InterprocReachingDefinitionsAnalysisSolver(program: Program) - extends ReachingDefinitionsAnalysis(program) + extends ReachingDefinitionsAnalysis(program, true) with SimpleWorklistFixpointSolver[CFGPosition, (Map[Variable, Set[ReachingDefinitionsAnalysis#Definition]], Map[Variable, Set[ReachingDefinitionsAnalysis#Definition]]), ReachingDefinitionsAnalysis#TupleElement] with IRInterproceduralForwardDependencies \ No newline at end of file diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala index 11ed03bd8..5e0975fcf 100644 --- a/src/main/scala/analysis/RegionInjector.scala +++ b/src/main/scala/analysis/RegionInjector.scala @@ -262,7 +262,7 @@ class RegionInjector(domain: mutable.Set[CFGPosition], case _ => } } else if (regions.size > 1) { - throw RuntimeException("Multiple regions found for memory") + //throw RuntimeException("Multiple regions found for memory") // mmm.mergeRegions(regions) match { // case stackRegion: StackRegion => // return 
StackMemory(stackRegion.regionIdentifier, mem.addressSize, mem.valueSize) diff --git a/src/main/scala/ir/IRCursor.scala b/src/main/scala/ir/IRCursor.scala index 21944d4f1..80d2acdc5 100644 --- a/src/main/scala/ir/IRCursor.scala +++ b/src/main/scala/ir/IRCursor.scala @@ -148,27 +148,21 @@ object IntraProcBlockIRCursor extends IntraProcBlockIRCursor trait InterProcIRCursor extends IRWalk[CFGPosition, CFGPosition] { final def succ(pos: CFGPosition): Set[CFGPosition] = { - IntraProcIRCursor.succ(pos) ++ - (pos match + pos match case c: DirectCall if c.target.blocks.nonEmpty => Set(c.target) case c: Return => c.parent.parent.incomingCalls().map(_.successor).toSet - case _ => Set.empty - ) + case _ => IntraProcIRCursor.succ(pos) } final def pred(pos: CFGPosition): Set[CFGPosition] = { - IntraProcIRCursor.pred(pos) ++ - (pos match - case c: Command => { + pos match + case c: Command => IRWalk.prevCommandInBlock(c) match { case Some(d: DirectCall) if d.target.blocks.nonEmpty => d.target.returnBlock.toSet - case o => o.toSet + case o => o.toSet ++ IntraProcIRCursor.pred(pos) } - - } - case c: Procedure => c.incomingCalls().toSet.asInstanceOf[Set[CFGPosition]] - case _ => Set.empty - ) + case c: Procedure => c.incomingCalls().toSet.asInstanceOf[Set[CFGPosition]] + case _ => IntraProcIRCursor.pred(pos) } } @@ -251,9 +245,14 @@ def stronglyConnectedComponents[T <: CFGPosition, O <: T](walker: IRWalk[T, O], out } -def toDot(program: Program, labels: Map[CFGPosition, String] = Map.empty): String = { - val domain = computeDomain[CFGPosition, CFGPosition](IntraProcIRCursor, program.procedures) - toDot[CFGPosition](domain, IntraProcIRCursor, labels) +def toDot(program: Program, labels: Map[CFGPosition, String] = Map.empty, inter: Boolean = false): String = { + if (inter) { + val domain = computeDomain[CFGPosition, CFGPosition](InterProcIRCursor, program.procedures) + toDot[CFGPosition](domain, InterProcIRCursor, labels) + } else { + val domain = computeDomain[CFGPosition, CFGPosition](IntraProcIRCursor, program.procedures) + toDot[CFGPosition](domain, IntraProcIRCursor, labels) + } } def dotCallGraph(program: Program, labels: Map[CFGPosition, String] = Map.empty): String = { diff --git a/src/main/scala/ir/transforms/IndirectCallResolution.scala b/src/main/scala/ir/transforms/IndirectCallResolution.scala index 8f1ae8e3b..2503b00ad 100644 --- a/src/main/scala/ir/transforms/IndirectCallResolution.scala +++ b/src/main/scala/ir/transforms/IndirectCallResolution.scala @@ -103,7 +103,7 @@ def resolveIndirectCallsUsingPointsTo( val targets: mutable.Set[Procedure] = targetNames.map(name => IRProgram.procedures.find(_.name == name).getOrElse(addFakeProcedure(name))) - if (targets.size > 1) { + if (targets.nonEmpty) { Logger.debug(s"Resolved indirect call $indirectCall") } diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index c8eb7e306..4d4a960bc 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -329,6 +329,10 @@ object StaticAnalysis { val constPropSolver = ConstantPropagationSolver(IRProgram) val constPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] = constPropSolver.analyze() + config.analysisResultsPath.foreach(s => + writeToFile(printAnalysisResults(IRProgram, constPropResult), s"${s}OGconstprop$iteration.txt") + ) + Logger.debug("[!] 
Variable dependency summaries") val scc = stronglyConnectedComponents(CallGraph, List(IRProgram.mainProcedure)) val specGlobalAddresses = ctx.specification.globals.map(s => s.address -> s.name).toMap @@ -346,36 +350,27 @@ object StaticAnalysis { writeToFile(toDot(dumpdomain, InterProcIRCursor, Map.empty), s"${f}_new_ir_intercfg$iteration.dot") }) - val reachingDefinitionsAnalysisSolver = ReachingDefinitionsAnalysisSolver(IRProgram) + val reachingDefinitionsAnalysisSolver = InterprocReachingDefinitionsAnalysisSolver(IRProgram) val reachingDefinitionsAnalysisResults = reachingDefinitionsAnalysisSolver.analyze() config.analysisDotPath.foreach(s => { writeToFile( - toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> reachingDefinitionsAnalysisResults(b).toString).toMap), + toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> reachingDefinitionsAnalysisResults(b).toString).toMap, true), s"${s}_reachingDefinitions$iteration.dot" ) }) - - Logger.debug("[!] Running RegToMemAnalysisSolver") - val regionAccessesAnalysisSolver = RegionAccessesAnalysisSolver(IRProgram, constPropResult, reachingDefinitionsAnalysisResults) - val regionAccessesAnalysisResults = regionAccessesAnalysisSolver.analyze() - -// config.analysisDotPath.foreach(s => writeToFile(cfg.toDot(Output.labeler(regionAccessesAnalysisResults, true), Output.dotIder), s"${s}_RegTo$iteration.dot")) - config.analysisResultsPath.foreach(s => writeToFile(printAnalysisResults(IRProgram, regionAccessesAnalysisResults), s"${s}_RegTo$iteration.txt")) - config.analysisDotPath.foreach(s => { - writeToFile( - toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> regionAccessesAnalysisResults(b).toString).toMap), - s"${s}_RegTo$iteration.dot" - ) - }) - Logger.debug("[!] Running Constant Propagation with SSA") val constPropSolverWithSSA = ConstantPropagationSolverWithSSA(IRProgram, reachingDefinitionsAnalysisResults) val constPropResultWithSSA = constPropSolverWithSSA.analyze() + val mmm = MemoryModelMap() + mmm.preLoadGlobals(mergedSubroutines, globalOffsets, globalAddresses, globalSizes) + Logger.debug("[!] Running MRA") - val mraSolver = MemoryRegionAnalysisSolver(IRProgram, globalAddresses, globalOffsets, mergedSubroutines, constPropResult, ANRResult, RNAResult, regionAccessesAnalysisResults, reachingDefinitionsAnalysisResults) + val assumeR31 = false + val constantPropForMRA = ConstantPropagationSolver(IRProgram, assumeR31).analyze() + val mraSolver = MemoryRegionAnalysisSolver(IRProgram, globalAddresses, globalOffsets, mergedSubroutines, constantPropForMRA, ANRResult, RNAResult, reachingDefinitionsAnalysisResults, assumeR31, mmm) val mraResult = mraSolver.analyze() config.analysisDotPath.foreach(s => { @@ -397,12 +392,11 @@ object StaticAnalysis { }) Logger.debug("[!] Running MMM") - val mmm = MemoryModelMap() - mmm.convertMemoryRegions(mraSolver.procedureToStackRegions, mraSolver.procedureToHeapRegions, mraSolver.mergeRegions, mergedSubroutines, globalOffsets, globalAddresses, globalSizes, mraSolver.procedureToSharedRegions) + mmm.convertMemoryRegions(mraSolver.procedureToStackRegions, mraSolver.procedureToHeapRegions, mraSolver.mergeRegions, mraSolver.allocationSites, mraSolver.procedureToSharedRegions) mmm.logRegions() Logger.debug("[!] 
Running Steensgaard") - val steensgaardSolver = InterprocSteensgaardAnalysis(IRProgram, constPropResult, regionAccessesAnalysisResults, mmm, reachingDefinitionsAnalysisResults, globalOffsets) + val steensgaardSolver = InterprocSteensgaardAnalysis(IRProgram, constPropResult, mmm, reachingDefinitionsAnalysisResults, globalOffsets) steensgaardSolver.analyze() val steensgaardResults = steensgaardSolver.pointsTo() val memoryRegionContents = steensgaardSolver.getMemoryRegionContents From 832937f49f8b8b8f143fd6082d87570cfd556ffa Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 30 Sep 2024 11:12:46 +1000 Subject: [PATCH 064/104] stopped propagating stack SAA facts through direct calls --- src/main/scala/analysis/SymbolicAddressAnalysis.scala | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/main/scala/analysis/SymbolicAddressAnalysis.scala b/src/main/scala/analysis/SymbolicAddressAnalysis.scala index 026cbf643..25d327220 100644 --- a/src/main/scala/analysis/SymbolicAddressAnalysis.scala +++ b/src/main/scala/analysis/SymbolicAddressAnalysis.scala @@ -146,6 +146,13 @@ trait SymbolicAddressFunctions(constProp: Map[CFGPosition, Map[Variable, FlatEle case Some(value) => value.value case None => -1 Map(d -> IdEdge(), Left(SymbolicAddress(mallocVariable, HeapLocation(nextMallocCount, procedure(n), size), 0)) -> ConstEdge(TwoElementTop)) + case DirectCall(proc, ret, label) if proc.returnBlock.isEmpty => // for when calls are non returning, kills the stack dataflow facts + d match + case Left(value) => + value.symbolicBase match + case StackLocation(regionIdentifier, parent, size) => Map() + case _ => Map(d -> IdEdge()) + case Right(_) => Map(d -> IdEdge()) case _ => Map(d -> IdEdge()) } From e96246003c8a5eed604392b7e5294cf90b9ada0b Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 30 Sep 2024 11:20:29 +1000 Subject: [PATCH 065/104] fix --- src/main/scala/analysis/solvers/DSAUnionFindSolver.scala | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/src/main/scala/analysis/solvers/DSAUnionFindSolver.scala b/src/main/scala/analysis/solvers/DSAUnionFindSolver.scala index 85901e3f3..d4b24b0c6 100644 --- a/src/main/scala/analysis/solvers/DSAUnionFindSolver.scala +++ b/src/main/scala/analysis/solvers/DSAUnionFindSolver.scala @@ -22,12 +22,7 @@ class DSAUnionFindSolver extends UnionFindSolver[UniTerm] { if (rep1 == rep2) return - (rep1, rep2) match { - case (t1: DSAUniTerm, t2: DSAUniTerm) => - mkUnion(t1, t2, offset) - case (x, y) => - throw new UnificationFailure(s"Cannot unify $t1 and $t2 (with representatives $x and $y)") - } + mkUnion(t1, t2, offset) } From 83033a79884cbfb2c38cbd9a16ee9138474aea9c Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 30 Sep 2024 11:51:11 +1000 Subject: [PATCH 066/104] fixes --- src/main/scala/analysis/DSA.scala | 2 +- .../analysis/{Local.scala => LocalDSA.scala} | 53 ++++++++++--------- 2 files changed, 30 insertions(+), 25 deletions(-) rename src/main/scala/analysis/{Local.scala => LocalDSA.scala} (93%) diff --git a/src/main/scala/analysis/DSA.scala b/src/main/scala/analysis/DSA.scala index 6101b6311..9cf80b4d7 100644 --- a/src/main/scala/analysis/DSA.scala +++ b/src/main/scala/analysis/DSA.scala @@ -66,7 +66,7 @@ class DSA(program: Program, // perform local analysis on all procs domain.foreach( proc => - val dsg = Local(proc, symResults, constProp, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params).analyze() + val dsg = LocalDSA(proc, symResults, constProp, globals, globalOffsets, 
externalFunctions, reachingDefs, writesTo, params).analyze() locals.update(proc, dsg) bu.update(proc, dsg.cloneSelf()) diff --git a/src/main/scala/analysis/Local.scala b/src/main/scala/analysis/LocalDSA.scala similarity index 93% rename from src/main/scala/analysis/Local.scala rename to src/main/scala/analysis/LocalDSA.scala index 8c9076f1d..053e4ccd1 100644 --- a/src/main/scala/analysis/Local.scala +++ b/src/main/scala/analysis/LocalDSA.scala @@ -21,7 +21,7 @@ import scala.collection.mutable * @param writesTo mapping from procedures to registers they change * @param params mapping from procedures to their parameters */ -class Local( +class LocalDSA( proc: Procedure, symResults: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], @@ -182,28 +182,31 @@ class Local( /** * handles unsupported pointer arithmetic by collapsing all the nodes invloved */ - def unsupportedPointerArithmeticOperation(n: CFGPosition, expr: Expr, lhsCell: DSC): Unit = { - var containsPointer = false - breakable { - for (v <- expr.variables) { - if varToSym.contains(n) && varToSym(n).contains(v) then - containsPointer = true - break - } + def unsupportedPointerArithmeticOperation(n: CFGPosition, expr: Expr, lhsCell: DSC): DSC = { +// var containsPointer = false +// breakable { +// for (v <- expr.variables) { +// if varToSym.contains(n) && varToSym(n).contains(v) then +// containsPointer = true +// break +// } +// } +// if containsPointer then + val cell = expr.variables.foldLeft(lhsCell) { + (c, v) => + val cells: Set[Slice] = graph.getCells(n, v) + + cells.foldLeft(c) { + (c, p) => + graph.mergeCells(c, p._1) + } } - if containsPointer then - val cell = expr.variables.foldLeft(lhsCell) { - (c, v) => - val cells: Set[Slice] = graph.getCells(n, v) - - cells.foldLeft(c) { - (c, p) => - graph.mergeCells(c, p._1) - } - } - val node = cell.node.get - node.flags.unknown = true - val test = graph.collapseNode(node) + + val node = cell.node.get + node.flags.unknown = true + graph.collapseNode(node) + + node.cells(0) } def visit(n: CFGPosition): Unit = { @@ -282,7 +285,8 @@ class Local( case None => // assert(varToSym(n).contains(arg1)) // collapse the result - visitPointerArithmeticOperation(n, lhsCell, arg1, byteSize, true, 0, true) +// visitPointerArithmeticOperation(n, lhsCell, arg1, byteSize, true, 0, true) + unsupportedPointerArithmeticOperation(n, index,DSN(Some(graph)).cells(0)) case arg: Variable => // assert(varToSym(n).contains(arg)) visitPointerArithmeticOperation(n, lhsCell, arg, byteSize, true) @@ -315,7 +319,8 @@ class Local( case None => // assert(varToSym(n).contains(arg1)) // collapse the results - visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg1, byteSize, true, 0, true) + // visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg1, byteSize, true, 0, true) + unsupportedPointerArithmeticOperation(n, index,DSN(Some(graph)).cells(0)) case arg: Variable => // assert(varToSym(n).contains(arg)) visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg, byteSize, true) From 8caf2945608185e2d34427eb67660dce0f1e325b Mon Sep 17 00:00:00 2001 From: l-kent Date: Mon, 30 Sep 2024 11:33:22 +1000 Subject: [PATCH 067/104] example that causes assertions to fail --- examples/stackambiguity/stackambiguity.adt | 673 ++++++++++++++++++++ examples/stackambiguity/stackambiguity.bir | 310 +++++++++ examples/stackambiguity/stackambiguity.c | 19 + examples/stackambiguity/stackambiguity.relf | 122 ++++ 4 files changed, 1124 
insertions(+) create mode 100644 examples/stackambiguity/stackambiguity.adt create mode 100644 examples/stackambiguity/stackambiguity.bir create mode 100644 examples/stackambiguity/stackambiguity.c create mode 100644 examples/stackambiguity/stackambiguity.relf diff --git a/examples/stackambiguity/stackambiguity.adt b/examples/stackambiguity/stackambiguity.adt new file mode 100644 index 000000000..c06f24fdd --- /dev/null +++ b/examples/stackambiguity/stackambiguity.adt @@ -0,0 +1,673 @@ +Project(Attrs([Attr("filename","\"a.out\""), +Attr("image-specification","(declare abi (name str))\n(declare arch (name str))\n(declare base-address (addr int))\n(declare bias (off int))\n(declare bits (size int))\n(declare code-region (addr int) (size int) (off int))\n(declare code-start (addr int))\n(declare entry-point (addr int))\n(declare external-reference (addr int) (name str))\n(declare format (name str))\n(declare is-executable (flag bool))\n(declare is-little-endian (flag bool))\n(declare llvm:base-address (addr int))\n(declare llvm:code-entry (name str) (off int) (size int))\n(declare llvm:coff-import-library (name str))\n(declare llvm:coff-virtual-section-header (name str) (addr int) (size int))\n(declare llvm:elf-program-header (name str) (off int) (size int))\n(declare llvm:elf-program-header-flags (name str) (ld bool) (r bool) \n (w bool) (x bool))\n(declare llvm:elf-virtual-program-header (name str) (addr int) (size int))\n(declare llvm:entry-point (addr int))\n(declare llvm:macho-symbol (name str) (value int))\n(declare llvm:name-reference (at int) (name str))\n(declare llvm:relocation (at int) (addr int))\n(declare llvm:section-entry (name str) (addr int) (size int) (off int))\n(declare llvm:section-flags (name str) (r bool) (w bool) (x bool))\n(declare llvm:segment-command (name str) (off int) (size int))\n(declare llvm:segment-command-flags (name str) (r bool) (w bool) (x bool))\n(declare llvm:symbol-entry (name str) (addr int) (size int) (off int)\n (value int))\n(declare llvm:virtual-segment-command (name str) (addr int) (size int))\n(declare mapped (addr int) (size int) (off int))\n(declare named-region (addr int) (size int) (name str))\n(declare named-symbol (addr int) (name str))\n(declare require (name str))\n(declare section (addr int) (size int))\n(declare segment (addr int) (size int) (r bool) (w bool) (x bool))\n(declare subarch (name str))\n(declare symbol-chunk (addr int) (size int) (root int))\n(declare symbol-value (addr int) (value int))\n(declare system (name str))\n(declare vendor (name str))\n\n(abi unknown)\n(arch aarch64)\n(base-address 0)\n(bias 0)\n(bits 64)\n(code-region 1952 20 1952)\n(code-region 1536 416 1536)\n(code-region 1440 96 1440)\n(code-region 1408 24 1408)\n(code-start 1588)\n(code-start 1536)\n(code-start 1860)\n(code-start 1812)\n(entry-point 1536)\n(external-reference 69568 _ITM_deregisterTMCloneTable)\n(external-reference 69576 __cxa_finalize)\n(external-reference 69584 __gmon_start__)\n(external-reference 69600 _ITM_registerTMCloneTable)\n(external-reference 69632 __libc_start_main)\n(external-reference 69640 __cxa_finalize)\n(external-reference 69648 __gmon_start__)\n(external-reference 69656 abort)\n(format elf)\n(is-executable true)\n(is-little-endian true)\n(llvm:base-address 0)\n(llvm:code-entry abort 0 0)\n(llvm:code-entry __cxa_finalize 0 0)\n(llvm:code-entry __libc_start_main 0 0)\n(llvm:code-entry test 1812 48)\n(llvm:code-entry _init 1408 0)\n(llvm:code-entry main 1860 92)\n(llvm:code-entry _start 1536 52)\n(llvm:code-entry 
abort@GLIBC_2.17 0 0)\n(llvm:code-entry _fini 1952 0)\n(llvm:code-entry __cxa_finalize@GLIBC_2.17 0 0)\n(llvm:code-entry __libc_start_main@GLIBC_2.34 0 0)\n(llvm:code-entry frame_dummy 1808 0)\n(llvm:code-entry __do_global_dtors_aux 1728 0)\n(llvm:code-entry register_tm_clones 1664 0)\n(llvm:code-entry deregister_tm_clones 1616 0)\n(llvm:code-entry call_weak_fn 1588 20)\n(llvm:code-entry .fini 1952 20)\n(llvm:code-entry .text 1536 416)\n(llvm:code-entry .plt 1440 96)\n(llvm:code-entry .init 1408 24)\n(llvm:elf-program-header 08 3528 568)\n(llvm:elf-program-header 07 0 0)\n(llvm:elf-program-header 06 1976 68)\n(llvm:elf-program-header 05 596 68)\n(llvm:elf-program-header 04 3544 480)\n(llvm:elf-program-header 03 3528 616)\n(llvm:elf-program-header 02 0 2256)\n(llvm:elf-program-header 01 568 27)\n(llvm:elf-program-header 00 64 504)\n(llvm:elf-program-header-flags 08 false true false false)\n(llvm:elf-program-header-flags 07 false true true false)\n(llvm:elf-program-header-flags 06 false true false false)\n(llvm:elf-program-header-flags 05 false true false false)\n(llvm:elf-program-header-flags 04 false true true false)\n(llvm:elf-program-header-flags 03 true true true false)\n(llvm:elf-program-header-flags 02 true true false true)\n(llvm:elf-program-header-flags 01 false true false false)\n(llvm:elf-program-header-flags 00 false true false false)\n(llvm:elf-virtual-program-header 08 69064 568)\n(llvm:elf-virtual-program-header 07 0 0)\n(llvm:elf-virtual-program-header 06 1976 68)\n(llvm:elf-virtual-program-header 05 596 68)\n(llvm:elf-virtual-program-header 04 69080 480)\n(llvm:elf-virtual-program-header 03 69064 624)\n(llvm:elf-virtual-program-header 02 0 2256)\n(llvm:elf-virtual-program-header 01 568 27)\n(llvm:elf-virtual-program-header 00 64 504)\n(llvm:entry-point 1536)\n(llvm:name-reference 69656 abort)\n(llvm:name-reference 69648 __gmon_start__)\n(llvm:name-reference 69640 __cxa_finalize)\n(llvm:name-reference 69632 __libc_start_main)\n(llvm:name-reference 69600 _ITM_registerTMCloneTable)\n(llvm:name-reference 69584 __gmon_start__)\n(llvm:name-reference 69576 __cxa_finalize)\n(llvm:name-reference 69568 _ITM_deregisterTMCloneTable)\n(llvm:section-entry .shstrtab 0 259 6922)\n(llvm:section-entry .strtab 0 570 6352)\n(llvm:section-entry .symtab 0 2136 4216)\n(llvm:section-entry .comment 0 71 4144)\n(llvm:section-entry .bss 69680 8 4144)\n(llvm:section-entry .data 69664 16 4128)\n(llvm:section-entry .got.plt 69608 56 4072)\n(llvm:section-entry .got 69560 48 4024)\n(llvm:section-entry .dynamic 69080 480 3544)\n(llvm:section-entry .fini_array 69072 8 3536)\n(llvm:section-entry .init_array 69064 8 3528)\n(llvm:section-entry .eh_frame 2048 208 2048)\n(llvm:section-entry .eh_frame_hdr 1976 68 1976)\n(llvm:section-entry .rodata 1972 4 1972)\n(llvm:section-entry .fini 1952 20 1952)\n(llvm:section-entry .text 1536 416 1536)\n(llvm:section-entry .plt 1440 96 1440)\n(llvm:section-entry .init 1408 24 1408)\n(llvm:section-entry .rela.plt 1312 96 1312)\n(llvm:section-entry .rela.dyn 1120 192 1120)\n(llvm:section-entry .gnu.version_r 1072 48 1072)\n(llvm:section-entry .gnu.version 1054 18 1054)\n(llvm:section-entry .dynstr 912 141 912)\n(llvm:section-entry .dynsym 696 216 696)\n(llvm:section-entry .gnu.hash 664 28 664)\n(llvm:section-entry .note.ABI-tag 632 32 632)\n(llvm:section-entry .note.gnu.build-id 596 36 596)\n(llvm:section-entry .interp 568 27 568)\n(llvm:section-flags .shstrtab true false false)\n(llvm:section-flags .strtab true false false)\n(llvm:section-flags .symtab true false 
false)\n(llvm:section-flags .comment true false false)\n(llvm:section-flags .bss true true false)\n(llvm:section-flags .data true true false)\n(llvm:section-flags .got.plt true true false)\n(llvm:section-flags .got true true false)\n(llvm:section-flags .dynamic true true false)\n(llvm:section-flags .fini_array true true false)\n(llvm:section-flags .init_array true true false)\n(llvm:section-flags .eh_frame true false false)\n(llvm:section-flags .eh_frame_hdr true false false)\n(llvm:section-flags .rodata true false false)\n(llvm:section-flags .fini true false true)\n(llvm:section-flags .text true false true)\n(llvm:section-flags .plt true false true)\n(llvm:section-flags .init true false true)\n(llvm:section-flags .rela.plt true false false)\n(llvm:section-flags .rela.dyn true false false)\n(llvm:section-flags .gnu.version_r true false false)\n(llvm:section-flags .gnu.version true false false)\n(llvm:section-flags .dynstr true false false)\n(llvm:section-flags .dynsym true false false)\n(llvm:section-flags .gnu.hash true false false)\n(llvm:section-flags .note.ABI-tag true false false)\n(llvm:section-flags .note.gnu.build-id true false false)\n(llvm:section-flags .interp true false false)\n(llvm:symbol-entry abort 0 0 0 0)\n(llvm:symbol-entry __cxa_finalize 0 0 0 0)\n(llvm:symbol-entry __libc_start_main 0 0 0 0)\n(llvm:symbol-entry test 1812 48 1812 1812)\n(llvm:symbol-entry _init 1408 0 1408 1408)\n(llvm:symbol-entry main 1860 92 1860 1860)\n(llvm:symbol-entry _start 1536 52 1536 1536)\n(llvm:symbol-entry abort@GLIBC_2.17 0 0 0 0)\n(llvm:symbol-entry _fini 1952 0 1952 1952)\n(llvm:symbol-entry __cxa_finalize@GLIBC_2.17 0 0 0 0)\n(llvm:symbol-entry __libc_start_main@GLIBC_2.34 0 0 0 0)\n(llvm:symbol-entry frame_dummy 1808 0 1808 1808)\n(llvm:symbol-entry __do_global_dtors_aux 1728 0 1728 1728)\n(llvm:symbol-entry register_tm_clones 1664 0 1664 1664)\n(llvm:symbol-entry deregister_tm_clones 1616 0 1616 1616)\n(llvm:symbol-entry call_weak_fn 1588 20 1588 1588)\n(mapped 0 2256 0)\n(mapped 69064 616 3528)\n(named-region 0 2256 02)\n(named-region 69064 624 03)\n(named-region 568 27 .interp)\n(named-region 596 36 .note.gnu.build-id)\n(named-region 632 32 .note.ABI-tag)\n(named-region 664 28 .gnu.hash)\n(named-region 696 216 .dynsym)\n(named-region 912 141 .dynstr)\n(named-region 1054 18 .gnu.version)\n(named-region 1072 48 .gnu.version_r)\n(named-region 1120 192 .rela.dyn)\n(named-region 1312 96 .rela.plt)\n(named-region 1408 24 .init)\n(named-region 1440 96 .plt)\n(named-region 1536 416 .text)\n(named-region 1952 20 .fini)\n(named-region 1972 4 .rodata)\n(named-region 1976 68 .eh_frame_hdr)\n(named-region 2048 208 .eh_frame)\n(named-region 69064 8 .init_array)\n(named-region 69072 8 .fini_array)\n(named-region 69080 480 .dynamic)\n(named-region 69560 48 .got)\n(named-region 69608 56 .got.plt)\n(named-region 69664 16 .data)\n(named-region 69680 8 .bss)\n(named-region 0 71 .comment)\n(named-region 0 2136 .symtab)\n(named-region 0 570 .strtab)\n(named-region 0 259 .shstrtab)\n(named-symbol 1588 call_weak_fn)\n(named-symbol 1616 deregister_tm_clones)\n(named-symbol 1664 register_tm_clones)\n(named-symbol 1728 __do_global_dtors_aux)\n(named-symbol 1808 frame_dummy)\n(named-symbol 0 __libc_start_main@GLIBC_2.34)\n(named-symbol 0 __cxa_finalize@GLIBC_2.17)\n(named-symbol 1952 _fini)\n(named-symbol 0 abort@GLIBC_2.17)\n(named-symbol 1536 _start)\n(named-symbol 1860 main)\n(named-symbol 1408 _init)\n(named-symbol 1812 test)\n(named-symbol 0 __libc_start_main)\n(named-symbol 0 
__cxa_finalize)\n(named-symbol 0 abort)\n(require libc.so.6)\n(section 568 27)\n(section 596 36)\n(section 632 32)\n(section 664 28)\n(section 696 216)\n(section 912 141)\n(section 1054 18)\n(section 1072 48)\n(section 1120 192)\n(section 1312 96)\n(section 1408 24)\n(section 1440 96)\n(section 1536 416)\n(section 1952 20)\n(section 1972 4)\n(section 1976 68)\n(section 2048 208)\n(section 69064 8)\n(section 69072 8)\n(section 69080 480)\n(section 69560 48)\n(section 69608 56)\n(section 69664 16)\n(section 69680 8)\n(section 0 71)\n(section 0 2136)\n(section 0 570)\n(section 0 259)\n(segment 0 2256 true false true)\n(segment 69064 624 true true false)\n(subarch v8)\n(symbol-chunk 1588 20 1588)\n(symbol-chunk 1536 52 1536)\n(symbol-chunk 1860 92 1860)\n(symbol-chunk 1812 48 1812)\n(symbol-value 1588 1588)\n(symbol-value 1616 1616)\n(symbol-value 1664 1664)\n(symbol-value 1728 1728)\n(symbol-value 1808 1808)\n(symbol-value 1952 1952)\n(symbol-value 1536 1536)\n(symbol-value 1860 1860)\n(symbol-value 1408 1408)\n(symbol-value 1812 1812)\n(symbol-value 0 0)\n(system \"\")\n(vendor \"\")\n"), +Attr("abi-name","\"aarch64-linux-gnu-elf\"")]), +Sections([Section(".shstrtab", 0x0, "\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\xb7\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x10\x1c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x40\x00\x38\x00\x09\x00\x40\x00\x1d\x00\x1c\x00\x06\x00\x00\x00\x04\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\xf8\x01\x00\x00\x00\x00\x00\x00\xf8\x01\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd0\x08\x00\x00\x00\x00\x00\x00\xd0\x08\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x06\x00\x00\x00\xc8\x0d\x00\x00\x00\x00\x00\x00\xc8\x0d\x01\x00\x00\x00\x00\x00\xc8\x0d\x01"), +Section(".strtab", 0x0, 
"\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\xb7\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x10\x1c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x40\x00\x38\x00\x09\x00\x40\x00\x1d\x00\x1c\x00\x06\x00\x00\x00\x04\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\xf8\x01\x00\x00\x00\x00\x00\x00\xf8\x01\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd0\x08\x00\x00\x00\x00\x00\x00\xd0\x08\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x06\x00\x00\x00\xc8\x0d\x00\x00\x00\x00\x00\x00\xc8\x0d\x01\x00\x00\x00\x00\x00\xc8\x0d\x01\x00\x00\x00\x00\x00\x68\x02\x00\x00\x00\x00\x00\x00\x70\x02\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x06\x00\x00\x00\xd8\x0d\x00\x00\x00\x00\x00\x00\xd8\x0d\x01\x00\x00\x00\x00\x00\xd8\x0d\x01\x00\x00\x00\x00\x00\xe0\x01\x00\x00\x00\x00\x00\x00\xe0\x01\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x04\x00\x00\x00\x54\x02\x00\x00\x00\x00\x00\x00\x54\x02\x00\x00\x00\x00\x00\x00\x54\x02\x00\x00\x00\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x50\xe5\x74\x64\x04\x00\x00\x00\xb8\x07\x00\x00\x00\x00\x00\x00\xb8\x07\x00\x00\x00\x00\x00\x00\xb8\x07\x00\x00\x00\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x51\xe5\x74\x64\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x52\xe5\x74\x64\x04\x00\x00\x00\xc8\x0d\x00\x00\x00\x00\x00\x00\xc8\x0d\x01\x00\x00\x00\x00\x00\xc8\x0d\x01\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x2f\x6c"), +Section(".symtab", 0x0, 
"\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\xb7\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x10\x1c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x40\x00\x38\x00\x09\x00\x40\x00\x1d\x00\x1c\x00\x06\x00\x00\x00\x04\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\xf8\x01\x00\x00\x00\x00\x00\x00\xf8\x01\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x04\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd0\x08\x00\x00\x00\x00\x00\x00\xd0\x08\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x06\x00\x00\x00\xc8\x0d\x00\x00\x00\x00\x00\x00\xc8\x0d\x01\x00\x00\x00\x00\x00\xc8\x0d\x01\x00\x00\x00\x00\x00\x68\x02\x00\x00\x00\x00\x00\x00\x70\x02\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x06\x00\x00\x00\xd8\x0d\x00\x00\x00\x00\x00\x00\xd8\x0d\x01\x00\x00\x00\x00\x00\xd8\x0d\x01\x00\x00\x00\x00\x00\xe0\x01\x00\x00\x00\x00\x00\x00\xe0\x01\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x04\x00\x00\x00\x54\x02\x00\x00\x00\x00\x00\x00\x54\x02\x00\x00\x00\x00\x00\x00\x54\x02\x00\x00\x00\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x50\xe5\x74\x64\x04\x00\x00\x00\xb8\x07\x00\x00\x00\x00\x00\x00\xb8\x07\x00\x00\x00\x00\x00\x00\xb8\x07\x00\x00\x00\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x51\xe5\x74\x64\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x52\xe5\x74\x64\x04\x00\x00\x00\xc8\x0d\x00\x00\x00\x00\x00\x00\xc8\x0d\x01\x00\x00\x00\x00\x00\xc8\x0d\x01\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x38\x02\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x2f\x6c\x69\x62\x2f\x6c\x64\x2d\x6c\x69\x6e\x75\x78\x2d\x61\x61\x72\x63\x68\x36\x34\x2e\x73\x6f\x2e\x31\x00\x00\x04\x00\x00\x00\x14\x00\x00\x00\x03\x00\x00\x00\x47\x4e\x55\x00\x75\xee\xa3\x81\x5a\xcb\x3c\xbb\xd6\xda\x03\x86\xf7\x5e\x45\x89\x13\xf4\x8c\xfe\x04\x00\x00\x00\x10\x00\x00\x00\x01\x00\x00\x00\x47\x4e\x55\x00\x00\x00\x00\x00\x03\x00\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x0b\x00\x80\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x17\x00\x20\x10\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x48\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x22\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x64\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x22\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
73\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x5f\x5f\x63\x78\x61\x5f\x66\x69\x6e\x61\x6c\x69\x7a\x65\x00\x5f\x5f\x6c\x69\x62\x63\x5f\x73\x74\x61\x72\x74\x5f\x6d\x61\x69\x6e\x00\x61\x62\x6f\x72\x74\x00\x6c\x69\x62\x63\x2e\x73\x6f\x2e\x36\x00\x47\x4c\x49\x42\x43\x5f\x32\x2e\x31\x37\x00\x47\x4c\x49\x42\x43\x5f\x32\x2e\x33\x34\x00\x5f\x49\x54\x4d\x5f\x64\x65\x72\x65\x67\x69\x73\x74\x65\x72\x54\x4d\x43\x6c\x6f\x6e\x65\x54\x61\x62\x6c\x65\x00\x5f\x5f\x67\x6d\x6f\x6e\x5f\x73\x74\x61\x72\x74\x5f\x5f\x00\x5f\x49\x54\x4d\x5f\x72\x65\x67\x69\x73\x74\x65\x72\x54\x4d\x43\x6c\x6f\x6e\x65\x54\x61\x62\x6c\x65\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x01\x00\x03\x00\x01\x00\x03\x00\x01\x00\x01\x00\x02\x00\x28\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x97\x91\x96\x06\x00\x00\x03\x00\x32\x00\x00\x00\x10\x00\x00\x00\xb4\x91\x96\x06\x00\x00\x02\x00\x3d\x00\x00\x00\x00\x00\x00\x00\xc8\x0d\x01\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x10\x07\x00\x00\x00\x00\x00\x00\xd0\x0d\x01\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\xc0\x06\x00\x00\x00\x00\x00\x00\xd8\x0f\x01\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x44\x07\x00\x00\x00\x00\x00\x00\x28\x10\x01\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x28\x10\x01\x00\x00\x00\x00\x00\xc0\x0f\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc8\x0f\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd0\x0f\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe0\x0f\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x10\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x10\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x18\x10\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f\x20\x03\xd5\xfd\x7b\xbf\xa9\xfd\x03\x00\x91\x2a\x00\x00\x94\xfd\x7b\xc1\xa8\xc0\x03\x5f\xd6\x00\x00\x00\x00\x00\x00\x00\x00\xf0\x7b\xbf\xa9\x90\x00\x00\x90\x11\xfe\x47\xf9\x10\xe2\x3f\x91\x20\x02\x1f\xd6\x1f\x20\x03\xd5\x1f\x20\x03\xd5\x1f\x20\x03\xd5\x90\x00\x00\xb0\x11\x02\x40\xf9\x10\x02\x00\x91\x20\x02\x1f\xd6\x90\x00\x00\xb0\x11\x06\x40\xf9\x10\x22\x00\x91\x20\x02\x1f\xd6\x90\x00\x00\xb0\x11\x0a\x40\xf9\x10\x42\x00\x91\x20\x02\x1f\xd6\x90\x00\x00\xb0\x11\x0e\x40\xf9\x10\x62\x00\x91\x20\x02\x1f\xd6\x1f\x20\x03\xd5\x1d\x00\x80\xd2\x1e\x00\x80\xd2\xe5\x03\x00\xaa\xe1\x03\x40\xf9\xe2\x23\x00\x91\xe6\x03\x00\x91\x80\x00\x00\x90\x00\xec\x47\xf9\x03\x00\x80\xd2\x04\x00\x80\xd2\xe5\xff\xff\x97\xf0\xff\xff\x97\x80\x00\x00\x90\x00\xe8\x47\xf9\x40\x00\x00\xb4\xe8\xff\xff\x17\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\x1f\x20\x03\xd5\x80\x00\x00\xb0\x00\xc0\x00\x91\x81\x00\x00\xb0\x21\xc0\x00\x91\x3f\x00\x00\xeb\xc0\x00\x00\x54\x81\x00\x00\x90\x21\xe0\x47\xf9\x61\x00\x00\xb4\xf0\x03\x01\xaa\x00\x02\x1f\xd6\xc0\x03\x5f\xd6\x80\x00\x00\xb0\x00\xc0\x00\x91\x81\x00\x00\xb0\x21\xc0\x00\x91\x21\x00\x00\xcb\x22\xfc\x7f\xd3\x41\x0c\x81\x8b\x21\xfc\x41\x93\xc1\x00\x00\xb4\x82\x00\x00\x90\x42\xf0\x47\xf9\x62\x00\x00\xb4\xf0\x03\x02\xaa\x00\x02\x1f\xd6\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\xfd\x7b\xbe\xa9\xfd\x03\x00\x91\xf3\x0b\x00\xf9\x93\x00\x00\xb0\x60\xc2\x40\x39\x40\x01\x00\x35\x80\x00\x00\x90\x00\xe4\x47\xf9\x80\x00\x00\xb4\x80\x00\x00\xb0\x00\x14\x40\xf9\xb9\xff\xff\x97\xd8\
xff\xff\x97\x20\x00\x80\x52\x60\xc2\x00\x39\xf3\x0b\x40\xf9\xfd\x7b\xc2\xa8\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\x1f\x20\x03\xd5\xdc\xff\xff\x17\xff\x43\x00\xd1\xe8\x03\x00\x91\x7f\x0c\x00\x71\x09\xc9\x23\x8b\x08\x21\x00\x91\x28\xb1\x88\x9a\xe0\x07\x00\x29\xe2\x7f\x01\x29\x08\x01\x40\xb9\x00\x05\x00\x11\xff\x43\x00\x91\xc0\x03\x5f\xd6\xff\x43\x00\xd1\x09\x10\x00\x11\x0b\x08\x00\x0b\xc8\x00\x80\x52\xea\x03\x00\x91\x0d\x7c\x7e\x93\x08\x01\x00\x4b\x4c\x21\x00\x91\x1f\x0c\x00\x71\x4a\x01\x0d\x8b\xe9\x2f\x00\x29\x49\xb1\x8c\x9a\x4d\x0b\x80\x52\x4a\xb1\x8c\x9a\xe8\x7f\x01\x29\x29\x01\x40\xb9\xeb\x23\x00\x29\xed\x07\x00\xf9\x48\x01\x40\xb9\x28\x01\x08\x0b\x00\x09\x00\x11\xff\x43\x00\x91\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\xfd\x7b\xbf\xa9\xfd\x03\x00\x91\xfd\x7b\xc1\xa8\xc0\x03\x5f\xd6\x01\x00\x02\x00\x01\x1b\x03\x3b\x44\x00\x00\x00\x07\x00\x00\x00\x48\xfe\xff\xff\x5c\x00\x00\x00\x98\xfe\xff\xff\x70\x00\x00\x00\xc8\xfe\xff\xff\x84\x00\x00\x00\x08\xff\xff\xff\x98\x00\x00\x00\x58\xff\xff\xff\xbc\x00\x00\x00\x5c\xff\xff\xff\xe4\x00\x00\x00\x8c\xff\xff\xff\xfc\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x01\x7a\x52\x00\x04\x78\x1e\x01\x1b\x0c\x1f\x00\x10\x00\x00\x00\x18\x00\x00\x00\xe4\xfd\xff\xff\x34\x00\x00\x00\x00\x41\x07\x1e\x10\x00\x00\x00\x2c\x00\x00\x00\x20\xfe\xff\xff\x30\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x40\x00\x00\x00\x3c\xfe\xff\xff\x3c\x00\x00\x00\x00\x00\x00\x00\x20\x00\x00\x00\x54\x00\x00\x00"), +Section(".comment", 0x0, "\x7f\x45\x4c\x46\x02\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\xb7\x00\x01\x00\x00\x00\x00\x06\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x10\x1c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x40\x00\x38\x00\x09\x00\x40\x00\x1d\x00\x1c\x00\x06\x00\x00\x00\x04\x00\x00"), +Section(".interp", 0x238, "\x2f\x6c\x69\x62\x2f\x6c\x64\x2d\x6c\x69\x6e\x75\x78\x2d\x61\x61\x72\x63\x68\x36\x34\x2e\x73\x6f\x2e\x31\x00"), +Section(".note.gnu.build-id", 0x254, "\x04\x00\x00\x00\x14\x00\x00\x00\x03\x00\x00\x00\x47\x4e\x55\x00\x75\xee\xa3\x81\x5a\xcb\x3c\xbb\xd6\xda\x03\x86\xf7\x5e\x45\x89\x13\xf4\x8c\xfe"), +Section(".note.ABI-tag", 0x278, "\x04\x00\x00\x00\x10\x00\x00\x00\x01\x00\x00\x00\x47\x4e\x55\x00\x00\x00\x00\x00\x03\x00\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00"), +Section(".gnu.hash", 0x298, "\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), +Section(".dynsym", 0x2B8, "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x0b\x00\x80\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x17\x00\x20\x10\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x48\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x22\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x64\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x22\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x73\x00\x00\x00\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), +Section(".dynstr", 0x390, 
"\x00\x5f\x5f\x63\x78\x61\x5f\x66\x69\x6e\x61\x6c\x69\x7a\x65\x00\x5f\x5f\x6c\x69\x62\x63\x5f\x73\x74\x61\x72\x74\x5f\x6d\x61\x69\x6e\x00\x61\x62\x6f\x72\x74\x00\x6c\x69\x62\x63\x2e\x73\x6f\x2e\x36\x00\x47\x4c\x49\x42\x43\x5f\x32\x2e\x31\x37\x00\x47\x4c\x49\x42\x43\x5f\x32\x2e\x33\x34\x00\x5f\x49\x54\x4d\x5f\x64\x65\x72\x65\x67\x69\x73\x74\x65\x72\x54\x4d\x43\x6c\x6f\x6e\x65\x54\x61\x62\x6c\x65\x00\x5f\x5f\x67\x6d\x6f\x6e\x5f\x73\x74\x61\x72\x74\x5f\x5f\x00\x5f\x49\x54\x4d\x5f\x72\x65\x67\x69\x73\x74\x65\x72\x54\x4d\x43\x6c\x6f\x6e\x65\x54\x61\x62\x6c\x65\x00"), +Section(".gnu.version", 0x41E, "\x00\x00\x00\x00\x00\x00\x02\x00\x01\x00\x03\x00\x01\x00\x03\x00\x01\x00"), +Section(".gnu.version_r", 0x430, "\x01\x00\x02\x00\x28\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x97\x91\x96\x06\x00\x00\x03\x00\x32\x00\x00\x00\x10\x00\x00\x00\xb4\x91\x96\x06\x00\x00\x02\x00\x3d\x00\x00\x00\x00\x00\x00\x00"), +Section(".rela.dyn", 0x460, "\xc8\x0d\x01\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x10\x07\x00\x00\x00\x00\x00\x00\xd0\x0d\x01\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\xc0\x06\x00\x00\x00\x00\x00\x00\xd8\x0f\x01\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x44\x07\x00\x00\x00\x00\x00\x00\x28\x10\x01\x00\x00\x00\x00\x00\x03\x04\x00\x00\x00\x00\x00\x00\x28\x10\x01\x00\x00\x00\x00\x00\xc0\x0f\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc8\x0f\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd0\x0f\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe0\x0f\x01\x00\x00\x00\x00\x00\x01\x04\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), +Section(".rela.plt", 0x520, "\x00\x10\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x10\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x10\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x18\x10\x01\x00\x00\x00\x00\x00\x02\x04\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), +Section(".init", 0x580, "\x1f\x20\x03\xd5\xfd\x7b\xbf\xa9\xfd\x03\x00\x91\x2a\x00\x00\x94\xfd\x7b\xc1\xa8\xc0\x03\x5f\xd6"), +Section(".plt", 0x5A0, "\xf0\x7b\xbf\xa9\x90\x00\x00\x90\x11\xfe\x47\xf9\x10\xe2\x3f\x91\x20\x02\x1f\xd6\x1f\x20\x03\xd5\x1f\x20\x03\xd5\x1f\x20\x03\xd5\x90\x00\x00\xb0\x11\x02\x40\xf9\x10\x02\x00\x91\x20\x02\x1f\xd6\x90\x00\x00\xb0\x11\x06\x40\xf9\x10\x22\x00\x91\x20\x02\x1f\xd6\x90\x00\x00\xb0\x11\x0a\x40\xf9\x10\x42\x00\x91\x20\x02\x1f\xd6\x90\x00\x00\xb0\x11\x0e\x40\xf9\x10\x62\x00\x91\x20\x02\x1f\xd6"), +Section(".text", 0x600, 
"\x1f\x20\x03\xd5\x1d\x00\x80\xd2\x1e\x00\x80\xd2\xe5\x03\x00\xaa\xe1\x03\x40\xf9\xe2\x23\x00\x91\xe6\x03\x00\x91\x80\x00\x00\x90\x00\xec\x47\xf9\x03\x00\x80\xd2\x04\x00\x80\xd2\xe5\xff\xff\x97\xf0\xff\xff\x97\x80\x00\x00\x90\x00\xe8\x47\xf9\x40\x00\x00\xb4\xe8\xff\xff\x17\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\x1f\x20\x03\xd5\x80\x00\x00\xb0\x00\xc0\x00\x91\x81\x00\x00\xb0\x21\xc0\x00\x91\x3f\x00\x00\xeb\xc0\x00\x00\x54\x81\x00\x00\x90\x21\xe0\x47\xf9\x61\x00\x00\xb4\xf0\x03\x01\xaa\x00\x02\x1f\xd6\xc0\x03\x5f\xd6\x80\x00\x00\xb0\x00\xc0\x00\x91\x81\x00\x00\xb0\x21\xc0\x00\x91\x21\x00\x00\xcb\x22\xfc\x7f\xd3\x41\x0c\x81\x8b\x21\xfc\x41\x93\xc1\x00\x00\xb4\x82\x00\x00\x90\x42\xf0\x47\xf9\x62\x00\x00\xb4\xf0\x03\x02\xaa\x00\x02\x1f\xd6\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\xfd\x7b\xbe\xa9\xfd\x03\x00\x91\xf3\x0b\x00\xf9\x93\x00\x00\xb0\x60\xc2\x40\x39\x40\x01\x00\x35\x80\x00\x00\x90\x00\xe4\x47\xf9\x80\x00\x00\xb4\x80\x00\x00\xb0\x00\x14\x40\xf9\xb9\xff\xff\x97\xd8\xff\xff\x97\x20\x00\x80\x52\x60\xc2\x00\x39\xf3\x0b\x40\xf9\xfd\x7b\xc2\xa8\xc0\x03\x5f\xd6\x1f\x20\x03\xd5\x1f\x20\x03\xd5\xdc\xff\xff\x17\xff\x43\x00\xd1\xe8\x03\x00\x91\x7f\x0c\x00\x71\x09\xc9\x23\x8b\x08\x21\x00\x91\x28\xb1\x88\x9a\xe0\x07\x00\x29\xe2\x7f\x01\x29\x08\x01\x40\xb9\x00\x05\x00\x11\xff\x43\x00\x91\xc0\x03\x5f\xd6\xff\x43\x00\xd1\x09\x10\x00\x11\x0b\x08\x00\x0b\xc8\x00\x80\x52\xea\x03\x00\x91\x0d\x7c\x7e\x93\x08\x01\x00\x4b\x4c\x21\x00\x91\x1f\x0c\x00\x71\x4a\x01\x0d\x8b\xe9\x2f\x00\x29\x49\xb1\x8c\x9a\x4d\x0b\x80\x52\x4a\xb1\x8c\x9a\xe8\x7f\x01\x29\x29\x01\x40\xb9\xeb\x23\x00\x29\xed\x07\x00\xf9\x48\x01\x40\xb9\x28\x01\x08\x0b\x00\x09\x00\x11\xff\x43\x00\x91\xc0\x03\x5f\xd6"), +Section(".fini", 0x7A0, "\x1f\x20\x03\xd5\xfd\x7b\xbf\xa9\xfd\x03\x00\x91\xfd\x7b\xc1\xa8\xc0\x03\x5f\xd6"), +Section(".rodata", 0x7B4, "\x01\x00\x02\x00"), +Section(".eh_frame_hdr", 0x7B8, "\x01\x1b\x03\x3b\x44\x00\x00\x00\x07\x00\x00\x00\x48\xfe\xff\xff\x5c\x00\x00\x00\x98\xfe\xff\xff\x70\x00\x00\x00\xc8\xfe\xff\xff\x84\x00\x00\x00\x08\xff\xff\xff\x98\x00\x00\x00\x58\xff\xff\xff\xbc\x00\x00\x00\x5c\xff\xff\xff\xe4\x00\x00\x00\x8c\xff\xff\xff\xfc\x00\x00\x00"), +Section(".eh_frame", 0x800, "\x10\x00\x00\x00\x00\x00\x00\x00\x01\x7a\x52\x00\x04\x78\x1e\x01\x1b\x0c\x1f\x00\x10\x00\x00\x00\x18\x00\x00\x00\xe4\xfd\xff\xff\x34\x00\x00\x00\x00\x41\x07\x1e\x10\x00\x00\x00\x2c\x00\x00\x00\x20\xfe\xff\xff\x30\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x40\x00\x00\x00\x3c\xfe\xff\xff\x3c\x00\x00\x00\x00\x00\x00\x00\x20\x00\x00\x00\x54\x00\x00\x00\x68\xfe\xff\xff\x48\x00\x00\x00\x00\x41\x0e\x20\x9d\x04\x9e\x03\x42\x93\x02\x4e\xde\xdd\xd3\x0e\x00\x00\x00\x00\x10\x00\x00\x00\x78\x00\x00\x00\x94\xfe\xff\xff\x04\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x01\x7a\x52\x00\x01\x7c\x1e\x01\x1b\x0c\x1f\x00\x14\x00\x00\x00\x18\x00\x00\x00\x70\xfe\xff\xff\x30\x00\x00\x00\x00\x44\x0e\x10\x68\x0e\x00\x00\x14\x00\x00\x00\x30\x00\x00\x00\x88\xfe\xff\xff\x5c\x00\x00\x00\x00\x44\x0e\x10\x02\x54\x0e\x00\x00\x00\x00\x00"), +Section(".fini_array", 0x10DD0, "\xc0\x06\x00\x00\x00\x00\x00\x00"), +Section(".dynamic", 0x10DD8, 
"\x01\x00\x00\x00\x00\x00\x00\x00\x28\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x80\x05\x00\x00\x00\x00\x00\x00\x0d\x00\x00\x00\x00\x00\x00\x00\xa0\x07\x00\x00\x00\x00\x00\x00\x19\x00\x00\x00\x00\x00\x00\x00\xc8\x0d\x01\x00\x00\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00\xd0\x0d\x01\x00\x00\x00\x00\x00\x1c\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\xf5\xfe\xff\x6f\x00\x00\x00\x00\x98\x02\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x90\x03\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00\x00\x00\x00\x00\xb8\x02\x00\x00\x00\x00\x00\x00\x0a\x00\x00\x00\x00\x00\x00\x00\x8d\x00\x00\x00\x00\x00\x00\x00\x0b\x00\x00\x00\x00\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x15\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\xe8\x0f\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x60\x00\x00\x00\x00\x00\x00\x00\x14\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x17\x00\x00\x00\x00\x00\x00\x00\x20\x05\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\x00\x00\x00\x00\x60\x04\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\xc0\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\xfb\xff\xff\x6f\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\xfe\xff\xff\x6f\x00\x00\x00\x00\x30\x04\x00\x00\x00\x00\x00\x00\xff\xff\xff\x6f\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xf0\xff\xff\x6f\x00\x00\x00\x00\x1e\x04\x00\x00\x00\x00\x00\x00\xf9\xff\xff\x6f\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), +Section(".got", 0x10FB8, "\xd8\x0d\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x44\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), +Section(".got.plt", 0x10FE8, "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xa0\x05\x00\x00\x00\x00\x00\x00\xa0\x05\x00\x00\x00\x00\x00\x00\xa0\x05\x00\x00\x00\x00\x00\x00\xa0\x05\x00\x00\x00\x00\x00\x00"), +Section(".data", 0x11020, "\x00\x00\x00\x00\x00\x00\x00\x00\x28\x10\x01\x00\x00\x00\x00\x00"), +Section(".init_array", 0x10DC8, "\x10\x07\x00\x00\x00\x00\x00\x00")]), +Memmap([Annotation(Region(0x0,0x8CF), Attr("segment","02 0 2256")), +Annotation(Region(0x600,0x633), Attr("symbol","\"_start\"")), +Annotation(Region(0x0,0x102), Attr("section","\".shstrtab\"")), +Annotation(Region(0x0,0x239), Attr("section","\".strtab\"")), +Annotation(Region(0x0,0x857), Attr("section","\".symtab\"")), +Annotation(Region(0x0,0x46), Attr("section","\".comment\"")), +Annotation(Region(0x238,0x252), Attr("section","\".interp\"")), +Annotation(Region(0x254,0x277), Attr("section","\".note.gnu.build-id\"")), +Annotation(Region(0x278,0x297), Attr("section","\".note.ABI-tag\"")), +Annotation(Region(0x298,0x2B3), Attr("section","\".gnu.hash\"")), +Annotation(Region(0x2B8,0x38F), Attr("section","\".dynsym\"")), +Annotation(Region(0x390,0x41C), Attr("section","\".dynstr\"")), +Annotation(Region(0x41E,0x42F), Attr("section","\".gnu.version\"")), +Annotation(Region(0x430,0x45F), Attr("section","\".gnu.version_r\"")), +Annotation(Region(0x460,0x51F), 
Attr("section","\".rela.dyn\"")), +Annotation(Region(0x520,0x57F), Attr("section","\".rela.plt\"")), +Annotation(Region(0x580,0x597), Attr("section","\".init\"")), +Annotation(Region(0x5A0,0x5FF), Attr("section","\".plt\"")), +Annotation(Region(0x580,0x597), Attr("code-region","()")), +Annotation(Region(0x5A0,0x5FF), Attr("code-region","()")), +Annotation(Region(0x600,0x633), Attr("symbol-info","_start 0x600 52")), +Annotation(Region(0x634,0x647), Attr("symbol","\"call_weak_fn\"")), +Annotation(Region(0x634,0x647), Attr("symbol-info","call_weak_fn 0x634 20")), +Annotation(Region(0x714,0x743), Attr("symbol","\"test\"")), +Annotation(Region(0x600,0x79F), Attr("section","\".text\"")), +Annotation(Region(0x600,0x79F), Attr("code-region","()")), +Annotation(Region(0x714,0x743), Attr("symbol-info","test 0x714 48")), +Annotation(Region(0x744,0x79F), Attr("symbol","\"main\"")), +Annotation(Region(0x744,0x79F), Attr("symbol-info","main 0x744 92")), +Annotation(Region(0x7A0,0x7B3), Attr("section","\".fini\"")), +Annotation(Region(0x7A0,0x7B3), Attr("code-region","()")), +Annotation(Region(0x7B4,0x7B7), Attr("section","\".rodata\"")), +Annotation(Region(0x7B8,0x7FB), Attr("section","\".eh_frame_hdr\"")), +Annotation(Region(0x800,0x8CF), Attr("section","\".eh_frame\"")), +Annotation(Region(0x10DC8,0x1102F), Attr("segment","03 0x10DC8 624")), +Annotation(Region(0x10DD0,0x10DD7), Attr("section","\".fini_array\"")), +Annotation(Region(0x10DD8,0x10FB7), Attr("section","\".dynamic\"")), +Annotation(Region(0x10FB8,0x10FE7), Attr("section","\".got\"")), +Annotation(Region(0x10FE8,0x1101F), Attr("section","\".got.plt\"")), +Annotation(Region(0x11020,0x1102F), Attr("section","\".data\"")), +Annotation(Region(0x10DC8,0x10DCF), Attr("section","\".init_array\""))]), +Program(Tid(1_876, "%00000754"), Attrs([]), + Subs([Sub(Tid(1_853, "@__cxa_finalize"), + Attrs([Attr("c.proto","signed (*)(void)"), Attr("address","0x5D0"), +Attr("stub","()")]), "__cxa_finalize", Args([Arg(Tid(1_877, "%00000755"), + Attrs([Attr("c.layout","[signed : 32]"), Attr("c.data","Top:u32"), +Attr("c.type","signed")]), Var("__cxa_finalize_result",Imm(32)), +LOW(32,Var("R0",Imm(64))), Out())]), Blks([Blk(Tid(1_211, "@__cxa_finalize"), + Attrs([Attr("address","0x5D0")]), Phis([]), +Defs([Def(Tid(1_459, "%000005b3"), Attrs([Attr("address","0x5D0"), +Attr("insn","adrp x16, #69632")]), Var("R16",Imm(64)), Int(69632,64)), +Def(Tid(1_466, "%000005ba"), Attrs([Attr("address","0x5D4"), +Attr("insn","ldr x17, [x16, #0x8]")]), Var("R17",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R16",Imm(64)),Int(8,64)),LittleEndian(),64)), +Def(Tid(1_472, "%000005c0"), Attrs([Attr("address","0x5D8"), +Attr("insn","add x16, x16, #0x8")]), Var("R16",Imm(64)), +PLUS(Var("R16",Imm(64)),Int(8,64)))]), Jmps([Call(Tid(1_477, "%000005c5"), + Attrs([Attr("address","0x5DC"), Attr("insn","br x17")]), Int(1,1), +(Indirect(Var("R17",Imm(64))),))]))])), +Sub(Tid(1_854, "@__do_global_dtors_aux"), + Attrs([Attr("c.proto","signed (*)(void)"), Attr("address","0x6C0")]), + "__do_global_dtors_aux", Args([Arg(Tid(1_878, "%00000756"), + Attrs([Attr("c.layout","[signed : 32]"), Attr("c.data","Top:u32"), +Attr("c.type","signed")]), Var("__do_global_dtors_aux_result",Imm(32)), +LOW(32,Var("R0",Imm(64))), Out())]), +Blks([Blk(Tid(730, "@__do_global_dtors_aux"), + Attrs([Attr("address","0x6C0")]), Phis([]), Defs([Def(Tid(734, "%000002de"), + Attrs([Attr("address","0x6C0"), +Attr("insn","stp x29, x30, [sp, #-0x20]!")]), Var("#3",Imm(64)), 
+PLUS(Var("R31",Imm(64)),Int(18446744073709551584,64))), +Def(Tid(740, "%000002e4"), Attrs([Attr("address","0x6C0"), +Attr("insn","stp x29, x30, [sp, #-0x20]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("#3",Imm(64)),Var("R29",Imm(64)),LittleEndian(),64)), +Def(Tid(746, "%000002ea"), Attrs([Attr("address","0x6C0"), +Attr("insn","stp x29, x30, [sp, #-0x20]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("#3",Imm(64)),Int(8,64)),Var("R30",Imm(64)),LittleEndian(),64)), +Def(Tid(750, "%000002ee"), Attrs([Attr("address","0x6C0"), +Attr("insn","stp x29, x30, [sp, #-0x20]!")]), Var("R31",Imm(64)), +Var("#3",Imm(64))), Def(Tid(756, "%000002f4"), + Attrs([Attr("address","0x6C4"), Attr("insn","mov x29, sp")]), + Var("R29",Imm(64)), Var("R31",Imm(64))), Def(Tid(764, "%000002fc"), + Attrs([Attr("address","0x6C8"), Attr("insn","str x19, [sp, #0x10]")]), + Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(16,64)),Var("R19",Imm(64)),LittleEndian(),64)), +Def(Tid(769, "%00000301"), Attrs([Attr("address","0x6CC"), +Attr("insn","adrp x19, #69632")]), Var("R19",Imm(64)), Int(69632,64)), +Def(Tid(776, "%00000308"), Attrs([Attr("address","0x6D0"), +Attr("insn","ldrb w0, [x19, #0x30]")]), Var("R0",Imm(64)), +UNSIGNED(64,Load(Var("mem",Mem(64,8)),PLUS(Var("R19",Imm(64)),Int(48,64)),LittleEndian(),8)))]), +Jmps([Goto(Tid(783, "%0000030f"), Attrs([Attr("address","0x6D4"), +Attr("insn","cbnz w0, #0x28")]), + NEQ(Extract(31,0,Var("R0",Imm(64))),Int(0,32)), +Direct(Tid(781, "%0000030d"))), Goto(Tid(1_855, "%0000073f"), Attrs([]), + Int(1,1), Direct(Tid(1_156, "%00000484")))])), Blk(Tid(1_156, "%00000484"), + Attrs([Attr("address","0x6D8")]), Phis([]), +Defs([Def(Tid(1_159, "%00000487"), Attrs([Attr("address","0x6D8"), +Attr("insn","adrp x0, #65536")]), Var("R0",Imm(64)), Int(65536,64)), +Def(Tid(1_166, "%0000048e"), Attrs([Attr("address","0x6DC"), +Attr("insn","ldr x0, [x0, #0xfc8]")]), Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R0",Imm(64)),Int(4040,64)),LittleEndian(),64))]), +Jmps([Goto(Tid(1_172, "%00000494"), Attrs([Attr("address","0x6E0"), +Attr("insn","cbz x0, #0x10")]), EQ(Var("R0",Imm(64)),Int(0,64)), +Direct(Tid(1_170, "%00000492"))), Goto(Tid(1_856, "%00000740"), Attrs([]), + Int(1,1), Direct(Tid(1_195, "%000004ab")))])), Blk(Tid(1_195, "%000004ab"), + Attrs([Attr("address","0x6E4")]), Phis([]), +Defs([Def(Tid(1_198, "%000004ae"), Attrs([Attr("address","0x6E4"), +Attr("insn","adrp x0, #69632")]), Var("R0",Imm(64)), Int(69632,64)), +Def(Tid(1_205, "%000004b5"), Attrs([Attr("address","0x6E8"), +Attr("insn","ldr x0, [x0, #0x28]")]), Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R0",Imm(64)),Int(40,64)),LittleEndian(),64)), +Def(Tid(1_210, "%000004ba"), Attrs([Attr("address","0x6EC"), +Attr("insn","bl #-0x11c")]), Var("R30",Imm(64)), Int(1776,64))]), +Jmps([Call(Tid(1_213, "%000004bd"), Attrs([Attr("address","0x6EC"), +Attr("insn","bl #-0x11c")]), Int(1,1), +(Direct(Tid(1_853, "@__cxa_finalize")),Direct(Tid(1_170, "%00000492"))))])), +Blk(Tid(1_170, "%00000492"), Attrs([Attr("address","0x6F0")]), Phis([]), +Defs([Def(Tid(1_178, "%0000049a"), Attrs([Attr("address","0x6F0"), +Attr("insn","bl #-0xa0")]), Var("R30",Imm(64)), Int(1780,64))]), +Jmps([Call(Tid(1_180, "%0000049c"), Attrs([Attr("address","0x6F0"), +Attr("insn","bl #-0xa0")]), Int(1,1), +(Direct(Tid(1_867, "@deregister_tm_clones")),Direct(Tid(1_182, "%0000049e"))))])), +Blk(Tid(1_182, "%0000049e"), Attrs([Attr("address","0x6F4")]), Phis([]), +Defs([Def(Tid(1_185, 
"%000004a1"), Attrs([Attr("address","0x6F4"), +Attr("insn","mov w0, #0x1")]), Var("R0",Imm(64)), Int(1,64)), +Def(Tid(1_193, "%000004a9"), Attrs([Attr("address","0x6F8"), +Attr("insn","strb w0, [x19, #0x30]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("R19",Imm(64)),Int(48,64)),Extract(7,0,Var("R0",Imm(64))),LittleEndian(),8))]), +Jmps([Goto(Tid(1_857, "%00000741"), Attrs([]), Int(1,1), +Direct(Tid(781, "%0000030d")))])), Blk(Tid(781, "%0000030d"), + Attrs([Attr("address","0x6FC")]), Phis([]), Defs([Def(Tid(791, "%00000317"), + Attrs([Attr("address","0x6FC"), Attr("insn","ldr x19, [sp, #0x10]")]), + Var("R19",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(16,64)),LittleEndian(),64)), +Def(Tid(798, "%0000031e"), Attrs([Attr("address","0x700"), +Attr("insn","ldp x29, x30, [sp], #0x20")]), Var("R29",Imm(64)), +Load(Var("mem",Mem(64,8)),Var("R31",Imm(64)),LittleEndian(),64)), +Def(Tid(803, "%00000323"), Attrs([Attr("address","0x700"), +Attr("insn","ldp x29, x30, [sp], #0x20")]), Var("R30",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(8,64)),LittleEndian(),64)), +Def(Tid(807, "%00000327"), Attrs([Attr("address","0x700"), +Attr("insn","ldp x29, x30, [sp], #0x20")]), Var("R31",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(32,64)))]), Jmps([Call(Tid(812, "%0000032c"), + Attrs([Attr("address","0x704"), Attr("insn","ret")]), Int(1,1), +(Indirect(Var("R30",Imm(64))),))]))])), Sub(Tid(1_858, "@__libc_start_main"), + Attrs([Attr("c.proto","signed (*)(signed (*)(signed , char** , char** );* main, signed , char** , \nvoid* auxv)"), +Attr("address","0x5C0"), Attr("stub","()")]), "__libc_start_main", + Args([Arg(Tid(1_879, "%00000757"), + Attrs([Attr("c.layout","**[ : 64]"), +Attr("c.data","Top:u64 ptr ptr"), +Attr("c.type","signed (*)(signed , char** , char** );*")]), + Var("__libc_start_main_main",Imm(64)), Var("R0",Imm(64)), In()), +Arg(Tid(1_880, "%00000758"), Attrs([Attr("c.layout","[signed : 32]"), +Attr("c.data","Top:u32"), Attr("c.type","signed")]), + Var("__libc_start_main_arg2",Imm(32)), LOW(32,Var("R1",Imm(64))), In()), +Arg(Tid(1_881, "%00000759"), Attrs([Attr("c.layout","**[char : 8]"), +Attr("c.data","Top:u8 ptr ptr"), Attr("c.type","char**")]), + Var("__libc_start_main_arg3",Imm(64)), Var("R2",Imm(64)), Both()), +Arg(Tid(1_882, "%0000075a"), Attrs([Attr("c.layout","*[ : 8]"), +Attr("c.data","{} ptr"), Attr("c.type","void*")]), + Var("__libc_start_main_auxv",Imm(64)), Var("R3",Imm(64)), Both()), +Arg(Tid(1_883, "%0000075b"), Attrs([Attr("c.layout","[signed : 32]"), +Attr("c.data","Top:u32"), Attr("c.type","signed")]), + Var("__libc_start_main_result",Imm(32)), LOW(32,Var("R0",Imm(64))), +Out())]), Blks([Blk(Tid(563, "@__libc_start_main"), + Attrs([Attr("address","0x5C0")]), Phis([]), +Defs([Def(Tid(1_437, "%0000059d"), Attrs([Attr("address","0x5C0"), +Attr("insn","adrp x16, #69632")]), Var("R16",Imm(64)), Int(69632,64)), +Def(Tid(1_444, "%000005a4"), Attrs([Attr("address","0x5C4"), +Attr("insn","ldr x17, [x16]")]), Var("R17",Imm(64)), +Load(Var("mem",Mem(64,8)),Var("R16",Imm(64)),LittleEndian(),64)), +Def(Tid(1_450, "%000005aa"), Attrs([Attr("address","0x5C8"), +Attr("insn","add x16, x16, #0x0")]), Var("R16",Imm(64)), +Var("R16",Imm(64)))]), Jmps([Call(Tid(1_455, "%000005af"), + Attrs([Attr("address","0x5CC"), Attr("insn","br x17")]), Int(1,1), +(Indirect(Var("R17",Imm(64))),))]))])), Sub(Tid(1_859, "@_fini"), + Attrs([Attr("c.proto","signed (*)(void)"), Attr("address","0x7A0")]), + "_fini", Args([Arg(Tid(1_884, "%0000075c"), + 
Attrs([Attr("c.layout","[signed : 32]"), Attr("c.data","Top:u32"), +Attr("c.type","signed")]), Var("_fini_result",Imm(32)), +LOW(32,Var("R0",Imm(64))), Out())]), Blks([Blk(Tid(31, "@_fini"), + Attrs([Attr("address","0x7A0")]), Phis([]), Defs([Def(Tid(37, "%00000025"), + Attrs([Attr("address","0x7A4"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("#0",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(18446744073709551600,64))), +Def(Tid(43, "%0000002b"), Attrs([Attr("address","0x7A4"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("#0",Imm(64)),Var("R29",Imm(64)),LittleEndian(),64)), +Def(Tid(49, "%00000031"), Attrs([Attr("address","0x7A4"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("#0",Imm(64)),Int(8,64)),Var("R30",Imm(64)),LittleEndian(),64)), +Def(Tid(53, "%00000035"), Attrs([Attr("address","0x7A4"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("R31",Imm(64)), +Var("#0",Imm(64))), Def(Tid(59, "%0000003b"), Attrs([Attr("address","0x7A8"), +Attr("insn","mov x29, sp")]), Var("R29",Imm(64)), Var("R31",Imm(64))), +Def(Tid(66, "%00000042"), Attrs([Attr("address","0x7AC"), +Attr("insn","ldp x29, x30, [sp], #0x10")]), Var("R29",Imm(64)), +Load(Var("mem",Mem(64,8)),Var("R31",Imm(64)),LittleEndian(),64)), +Def(Tid(71, "%00000047"), Attrs([Attr("address","0x7AC"), +Attr("insn","ldp x29, x30, [sp], #0x10")]), Var("R30",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(8,64)),LittleEndian(),64)), +Def(Tid(75, "%0000004b"), Attrs([Attr("address","0x7AC"), +Attr("insn","ldp x29, x30, [sp], #0x10")]), Var("R31",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(16,64)))]), Jmps([Call(Tid(80, "%00000050"), + Attrs([Attr("address","0x7B0"), Attr("insn","ret")]), Int(1,1), +(Indirect(Var("R30",Imm(64))),))]))])), Sub(Tid(1_860, "@_init"), + Attrs([Attr("c.proto","signed (*)(void)"), Attr("address","0x580")]), + "_init", Args([Arg(Tid(1_885, "%0000075d"), + Attrs([Attr("c.layout","[signed : 32]"), Attr("c.data","Top:u32"), +Attr("c.type","signed")]), Var("_init_result",Imm(32)), +LOW(32,Var("R0",Imm(64))), Out())]), Blks([Blk(Tid(1_660, "@_init"), + Attrs([Attr("address","0x580")]), Phis([]), +Defs([Def(Tid(1_666, "%00000682"), Attrs([Attr("address","0x584"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("#9",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(18446744073709551600,64))), +Def(Tid(1_672, "%00000688"), Attrs([Attr("address","0x584"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("#9",Imm(64)),Var("R29",Imm(64)),LittleEndian(),64)), +Def(Tid(1_678, "%0000068e"), Attrs([Attr("address","0x584"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("#9",Imm(64)),Int(8,64)),Var("R30",Imm(64)),LittleEndian(),64)), +Def(Tid(1_682, "%00000692"), Attrs([Attr("address","0x584"), +Attr("insn","stp x29, x30, [sp, #-0x10]!")]), Var("R31",Imm(64)), +Var("#9",Imm(64))), Def(Tid(1_688, "%00000698"), + Attrs([Attr("address","0x588"), Attr("insn","mov x29, sp")]), + Var("R29",Imm(64)), Var("R31",Imm(64))), Def(Tid(1_693, "%0000069d"), + Attrs([Attr("address","0x58C"), Attr("insn","bl #0xa8")]), + Var("R30",Imm(64)), Int(1424,64))]), Jmps([Call(Tid(1_695, "%0000069f"), + Attrs([Attr("address","0x58C"), Attr("insn","bl #0xa8")]), Int(1,1), +(Direct(Tid(1_865, "@call_weak_fn")),Direct(Tid(1_697, "%000006a1"))))])), +Blk(Tid(1_697, "%000006a1"), Attrs([Attr("address","0x590")]), Phis([]), 
+Defs([Def(Tid(1_702, "%000006a6"), Attrs([Attr("address","0x590"), +Attr("insn","ldp x29, x30, [sp], #0x10")]), Var("R29",Imm(64)), +Load(Var("mem",Mem(64,8)),Var("R31",Imm(64)),LittleEndian(),64)), +Def(Tid(1_707, "%000006ab"), Attrs([Attr("address","0x590"), +Attr("insn","ldp x29, x30, [sp], #0x10")]), Var("R30",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(8,64)),LittleEndian(),64)), +Def(Tid(1_711, "%000006af"), Attrs([Attr("address","0x590"), +Attr("insn","ldp x29, x30, [sp], #0x10")]), Var("R31",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(16,64)))]), Jmps([Call(Tid(1_716, "%000006b4"), + Attrs([Attr("address","0x594"), Attr("insn","ret")]), Int(1,1), +(Indirect(Var("R30",Imm(64))),))]))])), Sub(Tid(1_861, "@_start"), + Attrs([Attr("c.proto","signed (*)(void)"), Attr("address","0x600"), +Attr("stub","()"), Attr("entry-point","()")]), "_start", + Args([Arg(Tid(1_886, "%0000075e"), Attrs([Attr("c.layout","[signed : 32]"), +Attr("c.data","Top:u32"), Attr("c.type","signed")]), + Var("_start_result",Imm(32)), LOW(32,Var("R0",Imm(64))), Out())]), +Blks([Blk(Tid(500, "@_start"), Attrs([Attr("address","0x600")]), Phis([]), +Defs([Def(Tid(505, "%000001f9"), Attrs([Attr("address","0x604"), +Attr("insn","mov x29, #0x0")]), Var("R29",Imm(64)), Int(0,64)), +Def(Tid(510, "%000001fe"), Attrs([Attr("address","0x608"), +Attr("insn","mov x30, #0x0")]), Var("R30",Imm(64)), Int(0,64)), +Def(Tid(516, "%00000204"), Attrs([Attr("address","0x60C"), +Attr("insn","mov x5, x0")]), Var("R5",Imm(64)), Var("R0",Imm(64))), +Def(Tid(523, "%0000020b"), Attrs([Attr("address","0x610"), +Attr("insn","ldr x1, [sp]")]), Var("R1",Imm(64)), +Load(Var("mem",Mem(64,8)),Var("R31",Imm(64)),LittleEndian(),64)), +Def(Tid(529, "%00000211"), Attrs([Attr("address","0x614"), +Attr("insn","add x2, sp, #0x8")]), Var("R2",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(8,64))), Def(Tid(535, "%00000217"), + Attrs([Attr("address","0x618"), Attr("insn","mov x6, sp")]), + Var("R6",Imm(64)), Var("R31",Imm(64))), Def(Tid(540, "%0000021c"), + Attrs([Attr("address","0x61C"), Attr("insn","adrp x0, #65536")]), + Var("R0",Imm(64)), Int(65536,64)), Def(Tid(547, "%00000223"), + Attrs([Attr("address","0x620"), Attr("insn","ldr x0, [x0, #0xfd8]")]), + Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R0",Imm(64)),Int(4056,64)),LittleEndian(),64)), +Def(Tid(552, "%00000228"), Attrs([Attr("address","0x624"), +Attr("insn","mov x3, #0x0")]), Var("R3",Imm(64)), Int(0,64)), +Def(Tid(557, "%0000022d"), Attrs([Attr("address","0x628"), +Attr("insn","mov x4, #0x0")]), Var("R4",Imm(64)), Int(0,64)), +Def(Tid(562, "%00000232"), Attrs([Attr("address","0x62C"), +Attr("insn","bl #-0x6c")]), Var("R30",Imm(64)), Int(1584,64))]), +Jmps([Call(Tid(565, "%00000235"), Attrs([Attr("address","0x62C"), +Attr("insn","bl #-0x6c")]), Int(1,1), +(Direct(Tid(1_858, "@__libc_start_main")),Direct(Tid(567, "%00000237"))))])), +Blk(Tid(567, "%00000237"), Attrs([Attr("address","0x630")]), Phis([]), +Defs([Def(Tid(570, "%0000023a"), Attrs([Attr("address","0x630"), +Attr("insn","bl #-0x40")]), Var("R30",Imm(64)), Int(1588,64))]), +Jmps([Call(Tid(573, "%0000023d"), Attrs([Attr("address","0x630"), +Attr("insn","bl #-0x40")]), Int(1,1), +(Direct(Tid(1_864, "@abort")),Direct(Tid(1_862, "%00000746"))))])), +Blk(Tid(1_862, "%00000746"), Attrs([]), Phis([]), Defs([]), +Jmps([Call(Tid(1_863, "%00000747"), Attrs([]), Int(1,1), +(Direct(Tid(1_865, "@call_weak_fn")),))]))])), Sub(Tid(1_864, "@abort"), + Attrs([Attr("noreturn","()"), Attr("c.proto","void (*)(void)"), 
+Attr("address","0x5F0"), Attr("stub","()")]), "abort", Args([]), +Blks([Blk(Tid(571, "@abort"), Attrs([Attr("address","0x5F0")]), Phis([]), +Defs([Def(Tid(1_503, "%000005df"), Attrs([Attr("address","0x5F0"), +Attr("insn","adrp x16, #69632")]), Var("R16",Imm(64)), Int(69632,64)), +Def(Tid(1_510, "%000005e6"), Attrs([Attr("address","0x5F4"), +Attr("insn","ldr x17, [x16, #0x18]")]), Var("R17",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R16",Imm(64)),Int(24,64)),LittleEndian(),64)), +Def(Tid(1_516, "%000005ec"), Attrs([Attr("address","0x5F8"), +Attr("insn","add x16, x16, #0x18")]), Var("R16",Imm(64)), +PLUS(Var("R16",Imm(64)),Int(24,64)))]), Jmps([Call(Tid(1_521, "%000005f1"), + Attrs([Attr("address","0x5FC"), Attr("insn","br x17")]), Int(1,1), +(Indirect(Var("R17",Imm(64))),))]))])), Sub(Tid(1_865, "@call_weak_fn"), + Attrs([Attr("c.proto","signed (*)(void)"), Attr("address","0x634")]), + "call_weak_fn", Args([Arg(Tid(1_887, "%0000075f"), + Attrs([Attr("c.layout","[signed : 32]"), Attr("c.data","Top:u32"), +Attr("c.type","signed")]), Var("call_weak_fn_result",Imm(32)), +LOW(32,Var("R0",Imm(64))), Out())]), Blks([Blk(Tid(575, "@call_weak_fn"), + Attrs([Attr("address","0x634")]), Phis([]), Defs([Def(Tid(578, "%00000242"), + Attrs([Attr("address","0x634"), Attr("insn","adrp x0, #65536")]), + Var("R0",Imm(64)), Int(65536,64)), Def(Tid(585, "%00000249"), + Attrs([Attr("address","0x638"), Attr("insn","ldr x0, [x0, #0xfd0]")]), + Var("R0",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R0",Imm(64)),Int(4048,64)),LittleEndian(),64))]), +Jmps([Goto(Tid(591, "%0000024f"), Attrs([Attr("address","0x63C"), +Attr("insn","cbz x0, #0x8")]), EQ(Var("R0",Imm(64)),Int(0,64)), +Direct(Tid(589, "%0000024d"))), Goto(Tid(1_866, "%0000074a"), Attrs([]), + Int(1,1), Direct(Tid(1_275, "%000004fb")))])), Blk(Tid(589, "%0000024d"), + Attrs([Attr("address","0x644")]), Phis([]), Defs([]), +Jmps([Call(Tid(597, "%00000255"), Attrs([Attr("address","0x644"), +Attr("insn","ret")]), Int(1,1), (Indirect(Var("R30",Imm(64))),))])), +Blk(Tid(1_275, "%000004fb"), Attrs([Attr("address","0x640")]), Phis([]), +Defs([]), Jmps([Goto(Tid(1_278, "%000004fe"), Attrs([Attr("address","0x640"), +Attr("insn","b #-0x60")]), Int(1,1), +Direct(Tid(1_276, "@__gmon_start__")))])), Blk(Tid(1_276, "@__gmon_start__"), + Attrs([Attr("address","0x5E0")]), Phis([]), +Defs([Def(Tid(1_481, "%000005c9"), Attrs([Attr("address","0x5E0"), +Attr("insn","adrp x16, #69632")]), Var("R16",Imm(64)), Int(69632,64)), +Def(Tid(1_488, "%000005d0"), Attrs([Attr("address","0x5E4"), +Attr("insn","ldr x17, [x16, #0x10]")]), Var("R17",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R16",Imm(64)),Int(16,64)),LittleEndian(),64)), +Def(Tid(1_494, "%000005d6"), Attrs([Attr("address","0x5E8"), +Attr("insn","add x16, x16, #0x10")]), Var("R16",Imm(64)), +PLUS(Var("R16",Imm(64)),Int(16,64)))]), Jmps([Call(Tid(1_499, "%000005db"), + Attrs([Attr("address","0x5EC"), Attr("insn","br x17")]), Int(1,1), +(Indirect(Var("R17",Imm(64))),))]))])), +Sub(Tid(1_867, "@deregister_tm_clones"), + Attrs([Attr("c.proto","signed (*)(void)"), Attr("address","0x650")]), + "deregister_tm_clones", Args([Arg(Tid(1_888, "%00000760"), + Attrs([Attr("c.layout","[signed : 32]"), Attr("c.data","Top:u32"), +Attr("c.type","signed")]), Var("deregister_tm_clones_result",Imm(32)), +LOW(32,Var("R0",Imm(64))), Out())]), +Blks([Blk(Tid(603, "@deregister_tm_clones"), + Attrs([Attr("address","0x650")]), Phis([]), Defs([Def(Tid(606, "%0000025e"), + Attrs([Attr("address","0x650"), Attr("insn","adrp x0, #69632")]), + 
Var("R0",Imm(64)), Int(69632,64)), Def(Tid(612, "%00000264"), + Attrs([Attr("address","0x654"), Attr("insn","add x0, x0, #0x30")]), + Var("R0",Imm(64)), PLUS(Var("R0",Imm(64)),Int(48,64))), +Def(Tid(617, "%00000269"), Attrs([Attr("address","0x658"), +Attr("insn","adrp x1, #69632")]), Var("R1",Imm(64)), Int(69632,64)), +Def(Tid(623, "%0000026f"), Attrs([Attr("address","0x65C"), +Attr("insn","add x1, x1, #0x30")]), Var("R1",Imm(64)), +PLUS(Var("R1",Imm(64)),Int(48,64))), Def(Tid(629, "%00000275"), + Attrs([Attr("address","0x660"), Attr("insn","cmp x1, x0")]), + Var("#1",Imm(64)), NOT(Var("R0",Imm(64)))), Def(Tid(634, "%0000027a"), + Attrs([Attr("address","0x660"), Attr("insn","cmp x1, x0")]), + Var("#2",Imm(64)), PLUS(Var("R1",Imm(64)),NOT(Var("R0",Imm(64))))), +Def(Tid(640, "%00000280"), Attrs([Attr("address","0x660"), +Attr("insn","cmp x1, x0")]), Var("VF",Imm(1)), +NEQ(SIGNED(65,PLUS(Var("#2",Imm(64)),Int(1,64))),PLUS(PLUS(SIGNED(65,Var("R1",Imm(64))),SIGNED(65,Var("#1",Imm(64)))),Int(1,65)))), +Def(Tid(646, "%00000286"), Attrs([Attr("address","0x660"), +Attr("insn","cmp x1, x0")]), Var("CF",Imm(1)), +NEQ(UNSIGNED(65,PLUS(Var("#2",Imm(64)),Int(1,64))),PLUS(PLUS(UNSIGNED(65,Var("R1",Imm(64))),UNSIGNED(65,Var("#1",Imm(64)))),Int(1,65)))), +Def(Tid(650, "%0000028a"), Attrs([Attr("address","0x660"), +Attr("insn","cmp x1, x0")]), Var("ZF",Imm(1)), +EQ(PLUS(Var("#2",Imm(64)),Int(1,64)),Int(0,64))), Def(Tid(654, "%0000028e"), + Attrs([Attr("address","0x660"), Attr("insn","cmp x1, x0")]), + Var("NF",Imm(1)), Extract(63,63,PLUS(Var("#2",Imm(64)),Int(1,64))))]), +Jmps([Goto(Tid(660, "%00000294"), Attrs([Attr("address","0x664"), +Attr("insn","b.eq #0x18")]), EQ(Var("ZF",Imm(1)),Int(1,1)), +Direct(Tid(658, "%00000292"))), Goto(Tid(1_868, "%0000074c"), Attrs([]), + Int(1,1), Direct(Tid(1_245, "%000004dd")))])), Blk(Tid(1_245, "%000004dd"), + Attrs([Attr("address","0x668")]), Phis([]), +Defs([Def(Tid(1_248, "%000004e0"), Attrs([Attr("address","0x668"), +Attr("insn","adrp x1, #65536")]), Var("R1",Imm(64)), Int(65536,64)), +Def(Tid(1_255, "%000004e7"), Attrs([Attr("address","0x66C"), +Attr("insn","ldr x1, [x1, #0xfc0]")]), Var("R1",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R1",Imm(64)),Int(4032,64)),LittleEndian(),64))]), +Jmps([Goto(Tid(1_260, "%000004ec"), Attrs([Attr("address","0x670"), +Attr("insn","cbz x1, #0xc")]), EQ(Var("R1",Imm(64)),Int(0,64)), +Direct(Tid(658, "%00000292"))), Goto(Tid(1_869, "%0000074d"), Attrs([]), + Int(1,1), Direct(Tid(1_264, "%000004f0")))])), Blk(Tid(658, "%00000292"), + Attrs([Attr("address","0x67C")]), Phis([]), Defs([]), +Jmps([Call(Tid(666, "%0000029a"), Attrs([Attr("address","0x67C"), +Attr("insn","ret")]), Int(1,1), (Indirect(Var("R30",Imm(64))),))])), +Blk(Tid(1_264, "%000004f0"), Attrs([Attr("address","0x674")]), Phis([]), +Defs([Def(Tid(1_268, "%000004f4"), Attrs([Attr("address","0x674"), +Attr("insn","mov x16, x1")]), Var("R16",Imm(64)), Var("R1",Imm(64)))]), +Jmps([Call(Tid(1_273, "%000004f9"), Attrs([Attr("address","0x678"), +Attr("insn","br x16")]), Int(1,1), (Indirect(Var("R16",Imm(64))),))]))])), +Sub(Tid(1_870, "@frame_dummy"), Attrs([Attr("c.proto","signed (*)(void)"), +Attr("address","0x710")]), "frame_dummy", Args([Arg(Tid(1_889, "%00000761"), + Attrs([Attr("c.layout","[signed : 32]"), Attr("c.data","Top:u32"), +Attr("c.type","signed")]), Var("frame_dummy_result",Imm(32)), +LOW(32,Var("R0",Imm(64))), Out())]), Blks([Blk(Tid(818, "@frame_dummy"), + Attrs([Attr("address","0x710")]), Phis([]), Defs([]), +Jmps([Call(Tid(820, "%00000334"), 
Attrs([Attr("address","0x710"), +Attr("insn","b #-0x90")]), Int(1,1), +(Direct(Tid(1_872, "@register_tm_clones")),))]))])), Sub(Tid(1_871, "@main"), + Attrs([Attr("c.proto","signed (*)(signed argc, const char** argv)"), +Attr("address","0x744")]), "main", Args([Arg(Tid(1_890, "%00000762"), + Attrs([Attr("c.layout","[signed : 32]"), Attr("c.data","Top:u32"), +Attr("c.type","signed")]), Var("main_argc",Imm(32)), +LOW(32,Var("R0",Imm(64))), In()), Arg(Tid(1_891, "%00000763"), + Attrs([Attr("c.layout","**[char : 8]"), Attr("c.data","Top:u8 ptr ptr"), +Attr("c.type"," const char**")]), Var("main_argv",Imm(64)), +Var("R1",Imm(64)), Both()), Arg(Tid(1_892, "%00000764"), + Attrs([Attr("c.layout","[signed : 32]"), Attr("c.data","Top:u32"), +Attr("c.type","signed")]), Var("main_result",Imm(32)), +LOW(32,Var("R0",Imm(64))), Out())]), Blks([Blk(Tid(944, "@main"), + Attrs([Attr("address","0x744")]), Phis([]), Defs([Def(Tid(948, "%000003b4"), + Attrs([Attr("address","0x744"), Attr("insn","sub sp, sp, #0x10")]), + Var("R31",Imm(64)), PLUS(Var("R31",Imm(64)),Int(18446744073709551600,64))), +Def(Tid(954, "%000003ba"), Attrs([Attr("address","0x748"), +Attr("insn","add w9, w0, #0x4")]), Var("R9",Imm(64)), +UNSIGNED(64,PLUS(Extract(31,0,Var("R0",Imm(64))),Int(4,32)))), +Def(Tid(961, "%000003c1"), Attrs([Attr("address","0x74C"), +Attr("insn","add w11, w0, w0, lsl #2")]), Var("R11",Imm(64)), +UNSIGNED(64,PLUS(Extract(31,0,Var("R0",Imm(64))),Concat(Extract(29,0,Var("R0",Imm(64))),Int(0,2))))), +Def(Tid(966, "%000003c6"), Attrs([Attr("address","0x750"), +Attr("insn","mov w8, #0x6")]), Var("R8",Imm(64)), Int(6,64)), +Def(Tid(972, "%000003cc"), Attrs([Attr("address","0x754"), +Attr("insn","mov x10, sp")]), Var("R10",Imm(64)), Var("R31",Imm(64))), +Def(Tid(978, "%000003d2"), Attrs([Attr("address","0x758"), +Attr("insn","sbfiz x13, x0, #2, #32")]), Var("R13",Imm(64)), +SIGNED(64,Concat(Extract(31,0,Var("R0",Imm(64))),Int(0,2)))), +Def(Tid(985, "%000003d9"), Attrs([Attr("address","0x75C"), +Attr("insn","sub w8, w8, w0")]), Var("R8",Imm(64)), +UNSIGNED(64,PLUS(PLUS(Extract(31,0,Var("R8",Imm(64))),NOT(Extract(31,0,Var("R0",Imm(64))))),Int(1,32)))), +Def(Tid(991, "%000003df"), Attrs([Attr("address","0x760"), +Attr("insn","add x12, x10, #0x8")]), Var("R12",Imm(64)), +PLUS(Var("R10",Imm(64)),Int(8,64))), Def(Tid(997, "%000003e5"), + Attrs([Attr("address","0x764"), Attr("insn","cmp w0, #0x3")]), + Var("#6",Imm(32)), +PLUS(Extract(31,0,Var("R0",Imm(64))),Int(4294967292,32))), +Def(Tid(1_002, "%000003ea"), Attrs([Attr("address","0x764"), +Attr("insn","cmp w0, #0x3")]), Var("VF",Imm(1)), +NEQ(SIGNED(33,PLUS(Var("#6",Imm(32)),Int(1,32))),PLUS(SIGNED(33,Extract(31,0,Var("R0",Imm(64)))),Int(8589934589,33)))), +Def(Tid(1_007, "%000003ef"), Attrs([Attr("address","0x764"), +Attr("insn","cmp w0, #0x3")]), Var("CF",Imm(1)), +NEQ(UNSIGNED(33,PLUS(Var("#6",Imm(32)),Int(1,32))),PLUS(UNSIGNED(33,Extract(31,0,Var("R0",Imm(64)))),Int(4294967293,33)))), +Def(Tid(1_011, "%000003f3"), Attrs([Attr("address","0x764"), +Attr("insn","cmp w0, #0x3")]), Var("ZF",Imm(1)), +EQ(PLUS(Var("#6",Imm(32)),Int(1,32)),Int(0,32))), +Def(Tid(1_015, "%000003f7"), Attrs([Attr("address","0x764"), +Attr("insn","cmp w0, #0x3")]), Var("NF",Imm(1)), +Extract(31,31,PLUS(Var("#6",Imm(32)),Int(1,32)))), +Def(Tid(1_022, "%000003fe"), Attrs([Attr("address","0x768"), +Attr("insn","add x10, x10, x13")]), Var("R10",Imm(64)), +PLUS(Var("R10",Imm(64)),Var("R13",Imm(64)))), Def(Tid(1_030, "%00000406"), + Attrs([Attr("address","0x76C"), Attr("insn","stp w9, w11, [sp]")]), + 
Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("R31",Imm(64)),Extract(31,0,Var("R9",Imm(64))),LittleEndian(),32)), +Def(Tid(1_036, "%0000040c"), Attrs([Attr("address","0x76C"), +Attr("insn","stp w9, w11, [sp]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(4,64)),Extract(31,0,Var("R11",Imm(64))),LittleEndian(),32))]), +Jmps([Goto(Tid(1_051, "%0000041b"), Attrs([Attr("address","0x770"), +Attr("insn","csel x9, x10, x12, lt")]), + NEQ(Var("NF",Imm(1)),Var("VF",Imm(1))), Direct(Tid(1_043, "%00000413"))), +Goto(Tid(1_052, "%0000041c"), Attrs([Attr("address","0x770"), +Attr("insn","csel x9, x10, x12, lt")]), Int(1,1), +Direct(Tid(1_047, "%00000417")))])), Blk(Tid(1_047, "%00000417"), Attrs([]), + Phis([]), Defs([Def(Tid(1_048, "%00000418"), Attrs([Attr("address","0x770"), +Attr("insn","csel x9, x10, x12, lt")]), Var("R9",Imm(64)), +Var("R12",Imm(64)))]), Jmps([Goto(Tid(1_054, "%0000041e"), + Attrs([Attr("address","0x770"), Attr("insn","csel x9, x10, x12, lt")]), + Int(1,1), Direct(Tid(1_050, "%0000041a")))])), Blk(Tid(1_043, "%00000413"), + Attrs([]), Phis([]), Defs([Def(Tid(1_044, "%00000414"), + Attrs([Attr("address","0x770"), Attr("insn","csel x9, x10, x12, lt")]), + Var("R9",Imm(64)), Var("R10",Imm(64)))]), +Jmps([Goto(Tid(1_053, "%0000041d"), Attrs([Attr("address","0x770"), +Attr("insn","csel x9, x10, x12, lt")]), Int(1,1), +Direct(Tid(1_050, "%0000041a")))])), Blk(Tid(1_050, "%0000041a"), Attrs([]), + Phis([]), Defs([Def(Tid(1_059, "%00000423"), Attrs([Attr("address","0x774"), +Attr("insn","mov w13, #0x5a")]), Var("R13",Imm(64)), Int(90,64))]), +Jmps([Goto(Tid(1_074, "%00000432"), Attrs([Attr("address","0x778"), +Attr("insn","csel x10, x10, x12, lt")]), + NEQ(Var("NF",Imm(1)),Var("VF",Imm(1))), Direct(Tid(1_066, "%0000042a"))), +Goto(Tid(1_075, "%00000433"), Attrs([Attr("address","0x778"), +Attr("insn","csel x10, x10, x12, lt")]), Int(1,1), +Direct(Tid(1_070, "%0000042e")))])), Blk(Tid(1_070, "%0000042e"), Attrs([]), + Phis([]), Defs([Def(Tid(1_071, "%0000042f"), Attrs([Attr("address","0x778"), +Attr("insn","csel x10, x10, x12, lt")]), Var("R10",Imm(64)), +Var("R12",Imm(64)))]), Jmps([Goto(Tid(1_077, "%00000435"), + Attrs([Attr("address","0x778"), Attr("insn","csel x10, x10, x12, lt")]), + Int(1,1), Direct(Tid(1_073, "%00000431")))])), Blk(Tid(1_066, "%0000042a"), + Attrs([]), Phis([]), Defs([Def(Tid(1_067, "%0000042b"), + Attrs([Attr("address","0x778"), Attr("insn","csel x10, x10, x12, lt")]), + Var("R10",Imm(64)), Var("R10",Imm(64)))]), +Jmps([Goto(Tid(1_076, "%00000434"), Attrs([Attr("address","0x778"), +Attr("insn","csel x10, x10, x12, lt")]), Int(1,1), +Direct(Tid(1_073, "%00000431")))])), Blk(Tid(1_073, "%00000431"), Attrs([]), + Phis([]), Defs([Def(Tid(1_083, "%0000043b"), Attrs([Attr("address","0x77C"), +Attr("insn","stp w8, wzr, [sp, #0x8]")]), Var("#7",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(8,64))), Def(Tid(1_089, "%00000441"), + Attrs([Attr("address","0x77C"), Attr("insn","stp w8, wzr, [sp, #0x8]")]), + Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("#7",Imm(64)),Extract(31,0,Var("R8",Imm(64))),LittleEndian(),32)), +Def(Tid(1_094, "%00000446"), Attrs([Attr("address","0x77C"), +Attr("insn","stp w8, wzr, [sp, #0x8]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("#7",Imm(64)),Int(4,64)),Int(0,32),LittleEndian(),32)), +Def(Tid(1_101, "%0000044d"), Attrs([Attr("address","0x780"), +Attr("insn","ldr w9, [x9]")]), Var("R9",Imm(64)), +UNSIGNED(64,Load(Var("mem",Mem(64,8)),Var("R9",Imm(64)),LittleEndian(),32))), 
+Def(Tid(1_109, "%00000455"), Attrs([Attr("address","0x784"), +Attr("insn","stp w11, w8, [sp]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("R31",Imm(64)),Extract(31,0,Var("R11",Imm(64))),LittleEndian(),32)), +Def(Tid(1_115, "%0000045b"), Attrs([Attr("address","0x784"), +Attr("insn","stp w11, w8, [sp]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(4,64)),Extract(31,0,Var("R8",Imm(64))),LittleEndian(),32)), +Def(Tid(1_123, "%00000463"), Attrs([Attr("address","0x788"), +Attr("insn","str x13, [sp, #0x8]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(8,64)),Var("R13",Imm(64)),LittleEndian(),64)), +Def(Tid(1_130, "%0000046a"), Attrs([Attr("address","0x78C"), +Attr("insn","ldr w8, [x10]")]), Var("R8",Imm(64)), +UNSIGNED(64,Load(Var("mem",Mem(64,8)),Var("R10",Imm(64)),LittleEndian(),32))), +Def(Tid(1_137, "%00000471"), Attrs([Attr("address","0x790"), +Attr("insn","add w8, w9, w8")]), Var("R8",Imm(64)), +UNSIGNED(64,PLUS(Extract(31,0,Var("R9",Imm(64))),Extract(31,0,Var("R8",Imm(64)))))), +Def(Tid(1_143, "%00000477"), Attrs([Attr("address","0x794"), +Attr("insn","add w0, w8, #0x2")]), Var("R0",Imm(64)), +UNSIGNED(64,PLUS(Extract(31,0,Var("R8",Imm(64))),Int(2,32)))), +Def(Tid(1_149, "%0000047d"), Attrs([Attr("address","0x798"), +Attr("insn","add sp, sp, #0x10")]), Var("R31",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(16,64)))]), Jmps([Call(Tid(1_154, "%00000482"), + Attrs([Attr("address","0x79C"), Attr("insn","ret")]), Int(1,1), +(Indirect(Var("R30",Imm(64))),))]))])), +Sub(Tid(1_872, "@register_tm_clones"), + Attrs([Attr("c.proto","signed (*)(void)"), Attr("address","0x680")]), + "register_tm_clones", Args([Arg(Tid(1_893, "%00000765"), + Attrs([Attr("c.layout","[signed : 32]"), Attr("c.data","Top:u32"), +Attr("c.type","signed")]), Var("register_tm_clones_result",Imm(32)), +LOW(32,Var("R0",Imm(64))), Out())]), +Blks([Blk(Tid(668, "@register_tm_clones"), Attrs([Attr("address","0x680")]), + Phis([]), Defs([Def(Tid(671, "%0000029f"), Attrs([Attr("address","0x680"), +Attr("insn","adrp x0, #69632")]), Var("R0",Imm(64)), Int(69632,64)), +Def(Tid(677, "%000002a5"), Attrs([Attr("address","0x684"), +Attr("insn","add x0, x0, #0x30")]), Var("R0",Imm(64)), +PLUS(Var("R0",Imm(64)),Int(48,64))), Def(Tid(682, "%000002aa"), + Attrs([Attr("address","0x688"), Attr("insn","adrp x1, #69632")]), + Var("R1",Imm(64)), Int(69632,64)), Def(Tid(688, "%000002b0"), + Attrs([Attr("address","0x68C"), Attr("insn","add x1, x1, #0x30")]), + Var("R1",Imm(64)), PLUS(Var("R1",Imm(64)),Int(48,64))), +Def(Tid(695, "%000002b7"), Attrs([Attr("address","0x690"), +Attr("insn","sub x1, x1, x0")]), Var("R1",Imm(64)), +PLUS(PLUS(Var("R1",Imm(64)),NOT(Var("R0",Imm(64)))),Int(1,64))), +Def(Tid(701, "%000002bd"), Attrs([Attr("address","0x694"), +Attr("insn","lsr x2, x1, #63")]), Var("R2",Imm(64)), +Concat(Int(0,63),Extract(63,63,Var("R1",Imm(64))))), +Def(Tid(708, "%000002c4"), Attrs([Attr("address","0x698"), +Attr("insn","add x1, x2, x1, asr #3")]), Var("R1",Imm(64)), +PLUS(Var("R2",Imm(64)),ARSHIFT(Var("R1",Imm(64)),Int(3,3)))), +Def(Tid(714, "%000002ca"), Attrs([Attr("address","0x69C"), +Attr("insn","asr x1, x1, #1")]), Var("R1",Imm(64)), +SIGNED(64,Extract(63,1,Var("R1",Imm(64)))))]), +Jmps([Goto(Tid(720, "%000002d0"), Attrs([Attr("address","0x6A0"), +Attr("insn","cbz x1, #0x18")]), EQ(Var("R1",Imm(64)),Int(0,64)), +Direct(Tid(718, "%000002ce"))), Goto(Tid(1_873, "%00000751"), Attrs([]), + Int(1,1), Direct(Tid(1_215, "%000004bf")))])), Blk(Tid(1_215, "%000004bf"), 
+ Attrs([Attr("address","0x6A4")]), Phis([]), +Defs([Def(Tid(1_218, "%000004c2"), Attrs([Attr("address","0x6A4"), +Attr("insn","adrp x2, #65536")]), Var("R2",Imm(64)), Int(65536,64)), +Def(Tid(1_225, "%000004c9"), Attrs([Attr("address","0x6A8"), +Attr("insn","ldr x2, [x2, #0xfe0]")]), Var("R2",Imm(64)), +Load(Var("mem",Mem(64,8)),PLUS(Var("R2",Imm(64)),Int(4064,64)),LittleEndian(),64))]), +Jmps([Goto(Tid(1_230, "%000004ce"), Attrs([Attr("address","0x6AC"), +Attr("insn","cbz x2, #0xc")]), EQ(Var("R2",Imm(64)),Int(0,64)), +Direct(Tid(718, "%000002ce"))), Goto(Tid(1_874, "%00000752"), Attrs([]), + Int(1,1), Direct(Tid(1_234, "%000004d2")))])), Blk(Tid(718, "%000002ce"), + Attrs([Attr("address","0x6B8")]), Phis([]), Defs([]), +Jmps([Call(Tid(726, "%000002d6"), Attrs([Attr("address","0x6B8"), +Attr("insn","ret")]), Int(1,1), (Indirect(Var("R30",Imm(64))),))])), +Blk(Tid(1_234, "%000004d2"), Attrs([Attr("address","0x6B0")]), Phis([]), +Defs([Def(Tid(1_238, "%000004d6"), Attrs([Attr("address","0x6B0"), +Attr("insn","mov x16, x2")]), Var("R16",Imm(64)), Var("R2",Imm(64)))]), +Jmps([Call(Tid(1_243, "%000004db"), Attrs([Attr("address","0x6B4"), +Attr("insn","br x16")]), Int(1,1), (Indirect(Var("R16",Imm(64))),))]))])), +Sub(Tid(1_875, "@test"), Attrs([Attr("c.proto","signed (*)(void)"), +Attr("address","0x714")]), "test", Args([Arg(Tid(1_894, "%00000766"), + Attrs([Attr("c.layout","[signed : 32]"), Attr("c.data","Top:u32"), +Attr("c.type","signed")]), Var("test_result",Imm(32)), +LOW(32,Var("R0",Imm(64))), Out())]), Blks([Blk(Tid(822, "@test"), + Attrs([Attr("address","0x714")]), Phis([]), Defs([Def(Tid(826, "%0000033a"), + Attrs([Attr("address","0x714"), Attr("insn","sub sp, sp, #0x10")]), + Var("R31",Imm(64)), PLUS(Var("R31",Imm(64)),Int(18446744073709551600,64))), +Def(Tid(832, "%00000340"), Attrs([Attr("address","0x718"), +Attr("insn","mov x8, sp")]), Var("R8",Imm(64)), Var("R31",Imm(64))), +Def(Tid(838, "%00000346"), Attrs([Attr("address","0x71C"), +Attr("insn","cmp w3, #0x3")]), Var("#4",Imm(32)), +PLUS(Extract(31,0,Var("R3",Imm(64))),Int(4294967292,32))), +Def(Tid(843, "%0000034b"), Attrs([Attr("address","0x71C"), +Attr("insn","cmp w3, #0x3")]), Var("VF",Imm(1)), +NEQ(SIGNED(33,PLUS(Var("#4",Imm(32)),Int(1,32))),PLUS(SIGNED(33,Extract(31,0,Var("R3",Imm(64)))),Int(8589934589,33)))), +Def(Tid(848, "%00000350"), Attrs([Attr("address","0x71C"), +Attr("insn","cmp w3, #0x3")]), Var("CF",Imm(1)), +NEQ(UNSIGNED(33,PLUS(Var("#4",Imm(32)),Int(1,32))),PLUS(UNSIGNED(33,Extract(31,0,Var("R3",Imm(64)))),Int(4294967293,33)))), +Def(Tid(852, "%00000354"), Attrs([Attr("address","0x71C"), +Attr("insn","cmp w3, #0x3")]), Var("ZF",Imm(1)), +EQ(PLUS(Var("#4",Imm(32)),Int(1,32)),Int(0,32))), Def(Tid(856, "%00000358"), + Attrs([Attr("address","0x71C"), Attr("insn","cmp w3, #0x3")]), + Var("NF",Imm(1)), Extract(31,31,PLUS(Var("#4",Imm(32)),Int(1,32)))), +Def(Tid(863, "%0000035f"), Attrs([Attr("address","0x720"), +Attr("insn","add x9, x8, w3, sxtw #2")]), Var("R9",Imm(64)), +PLUS(Var("R8",Imm(64)),SIGNED(64,Concat(Extract(31,0,Var("R3",Imm(64))),Int(0,2))))), +Def(Tid(869, "%00000365"), Attrs([Attr("address","0x724"), +Attr("insn","add x8, x8, #0x8")]), Var("R8",Imm(64)), +PLUS(Var("R8",Imm(64)),Int(8,64)))]), Jmps([Goto(Tid(884, "%00000374"), + Attrs([Attr("address","0x728"), Attr("insn","csel x8, x9, x8, lt")]), + NEQ(Var("NF",Imm(1)),Var("VF",Imm(1))), Direct(Tid(876, "%0000036c"))), +Goto(Tid(885, "%00000375"), Attrs([Attr("address","0x728"), +Attr("insn","csel x8, x9, x8, lt")]), Int(1,1), +Direct(Tid(880, 
"%00000370")))])), Blk(Tid(880, "%00000370"), Attrs([]), + Phis([]), Defs([Def(Tid(881, "%00000371"), Attrs([Attr("address","0x728"), +Attr("insn","csel x8, x9, x8, lt")]), Var("R8",Imm(64)), +Var("R8",Imm(64)))]), Jmps([Goto(Tid(887, "%00000377"), + Attrs([Attr("address","0x728"), Attr("insn","csel x8, x9, x8, lt")]), + Int(1,1), Direct(Tid(883, "%00000373")))])), Blk(Tid(876, "%0000036c"), + Attrs([]), Phis([]), Defs([Def(Tid(877, "%0000036d"), + Attrs([Attr("address","0x728"), Attr("insn","csel x8, x9, x8, lt")]), + Var("R8",Imm(64)), Var("R9",Imm(64)))]), Jmps([Goto(Tid(886, "%00000376"), + Attrs([Attr("address","0x728"), Attr("insn","csel x8, x9, x8, lt")]), + Int(1,1), Direct(Tid(883, "%00000373")))])), Blk(Tid(883, "%00000373"), + Attrs([]), Phis([]), Defs([Def(Tid(895, "%0000037f"), + Attrs([Attr("address","0x72C"), Attr("insn","stp w0, w1, [sp]")]), + Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("R31",Imm(64)),Extract(31,0,Var("R0",Imm(64))),LittleEndian(),32)), +Def(Tid(901, "%00000385"), Attrs([Attr("address","0x72C"), +Attr("insn","stp w0, w1, [sp]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("R31",Imm(64)),Int(4,64)),Extract(31,0,Var("R1",Imm(64))),LittleEndian(),32)), +Def(Tid(907, "%0000038b"), Attrs([Attr("address","0x730"), +Attr("insn","stp w2, wzr, [sp, #0x8]")]), Var("#5",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(8,64))), Def(Tid(913, "%00000391"), + Attrs([Attr("address","0x730"), Attr("insn","stp w2, wzr, [sp, #0x8]")]), + Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),Var("#5",Imm(64)),Extract(31,0,Var("R2",Imm(64))),LittleEndian(),32)), +Def(Tid(918, "%00000396"), Attrs([Attr("address","0x730"), +Attr("insn","stp w2, wzr, [sp, #0x8]")]), Var("mem",Mem(64,8)), +Store(Var("mem",Mem(64,8)),PLUS(Var("#5",Imm(64)),Int(4,64)),Int(0,32),LittleEndian(),32)), +Def(Tid(925, "%0000039d"), Attrs([Attr("address","0x734"), +Attr("insn","ldr w8, [x8]")]), Var("R8",Imm(64)), +UNSIGNED(64,Load(Var("mem",Mem(64,8)),Var("R8",Imm(64)),LittleEndian(),32))), +Def(Tid(931, "%000003a3"), Attrs([Attr("address","0x738"), +Attr("insn","add w0, w8, #0x1")]), Var("R0",Imm(64)), +UNSIGNED(64,PLUS(Extract(31,0,Var("R8",Imm(64))),Int(1,32)))), +Def(Tid(937, "%000003a9"), Attrs([Attr("address","0x73C"), +Attr("insn","add sp, sp, #0x10")]), Var("R31",Imm(64)), +PLUS(Var("R31",Imm(64)),Int(16,64)))]), Jmps([Call(Tid(942, "%000003ae"), + Attrs([Attr("address","0x740"), Attr("insn","ret")]), Int(1,1), +(Indirect(Var("R30",Imm(64))),))]))]))]))) \ No newline at end of file diff --git a/examples/stackambiguity/stackambiguity.bir b/examples/stackambiguity/stackambiguity.bir new file mode 100644 index 000000000..98ceb4590 --- /dev/null +++ b/examples/stackambiguity/stackambiguity.bir @@ -0,0 +1,310 @@ +00000754: program +0000073d: sub __cxa_finalize(__cxa_finalize_result) +00000755: __cxa_finalize_result :: out u32 = low:32[R0] + +000004bb: +000005b3: R16 := 0x11000 +000005ba: R17 := mem[R16 + 8, el]:u64 +000005c0: R16 := R16 + 8 +000005c5: call R17 with noreturn + +0000073e: sub __do_global_dtors_aux(__do_global_dtors_aux_result) +00000756: __do_global_dtors_aux_result :: out u32 = low:32[R0] + +000002da: +000002de: #3 := R31 - 0x20 +000002e4: mem := mem with [#3, el]:u64 <- R29 +000002ea: mem := mem with [#3 + 8, el]:u64 <- R30 +000002ee: R31 := #3 +000002f4: R29 := R31 +000002fc: mem := mem with [R31 + 0x10, el]:u64 <- R19 +00000301: R19 := 0x11000 +00000308: R0 := pad:64[mem[R19 + 0x30]] +0000030f: when 31:0[R0] <> 0 goto %0000030d +0000073f: goto %00000484 + +00000484: 
+00000487: R0 := 0x10000 +0000048e: R0 := mem[R0 + 0xFC8, el]:u64 +00000494: when R0 = 0 goto %00000492 +00000740: goto %000004ab + +000004ab: +000004ae: R0 := 0x11000 +000004b5: R0 := mem[R0 + 0x28, el]:u64 +000004ba: R30 := 0x6F0 +000004bd: call @__cxa_finalize with return %00000492 + +00000492: +0000049a: R30 := 0x6F4 +0000049c: call @deregister_tm_clones with return %0000049e + +0000049e: +000004a1: R0 := 1 +000004a9: mem := mem with [R19 + 0x30] <- 7:0[R0] +00000741: goto %0000030d + +0000030d: +00000317: R19 := mem[R31 + 0x10, el]:u64 +0000031e: R29 := mem[R31, el]:u64 +00000323: R30 := mem[R31 + 8, el]:u64 +00000327: R31 := R31 + 0x20 +0000032c: call R30 with noreturn + +00000742: sub __libc_start_main(__libc_start_main_main, __libc_start_main_arg2, __libc_start_main_arg3, __libc_start_main_auxv, __libc_start_main_result) +00000757: __libc_start_main_main :: in u64 = R0 +00000758: __libc_start_main_arg2 :: in u32 = low:32[R1] +00000759: __libc_start_main_arg3 :: in out u64 = R2 +0000075a: __libc_start_main_auxv :: in out u64 = R3 +0000075b: __libc_start_main_result :: out u32 = low:32[R0] + +00000233: +0000059d: R16 := 0x11000 +000005a4: R17 := mem[R16, el]:u64 +000005aa: R16 := R16 +000005af: call R17 with noreturn + +00000743: sub _fini(_fini_result) +0000075c: _fini_result :: out u32 = low:32[R0] + +0000001f: +00000025: #0 := R31 - 0x10 +0000002b: mem := mem with [#0, el]:u64 <- R29 +00000031: mem := mem with [#0 + 8, el]:u64 <- R30 +00000035: R31 := #0 +0000003b: R29 := R31 +00000042: R29 := mem[R31, el]:u64 +00000047: R30 := mem[R31 + 8, el]:u64 +0000004b: R31 := R31 + 0x10 +00000050: call R30 with noreturn + +00000744: sub _init(_init_result) +0000075d: _init_result :: out u32 = low:32[R0] + +0000067c: +00000682: #9 := R31 - 0x10 +00000688: mem := mem with [#9, el]:u64 <- R29 +0000068e: mem := mem with [#9 + 8, el]:u64 <- R30 +00000692: R31 := #9 +00000698: R29 := R31 +0000069d: R30 := 0x590 +0000069f: call @call_weak_fn with return %000006a1 + +000006a1: +000006a6: R29 := mem[R31, el]:u64 +000006ab: R30 := mem[R31 + 8, el]:u64 +000006af: R31 := R31 + 0x10 +000006b4: call R30 with noreturn + +00000745: sub _start(_start_result) +0000075e: _start_result :: out u32 = low:32[R0] + +000001f4: +000001f9: R29 := 0 +000001fe: R30 := 0 +00000204: R5 := R0 +0000020b: R1 := mem[R31, el]:u64 +00000211: R2 := R31 + 8 +00000217: R6 := R31 +0000021c: R0 := 0x10000 +00000223: R0 := mem[R0 + 0xFD8, el]:u64 +00000228: R3 := 0 +0000022d: R4 := 0 +00000232: R30 := 0x630 +00000235: call @__libc_start_main with return %00000237 + +00000237: +0000023a: R30 := 0x634 +0000023d: call @abort with return %00000746 + +00000746: +00000747: call @call_weak_fn with noreturn + +00000748: sub abort() + + +0000023b: +000005df: R16 := 0x11000 +000005e6: R17 := mem[R16 + 0x18, el]:u64 +000005ec: R16 := R16 + 0x18 +000005f1: call R17 with noreturn + +00000749: sub call_weak_fn(call_weak_fn_result) +0000075f: call_weak_fn_result :: out u32 = low:32[R0] + +0000023f: +00000242: R0 := 0x10000 +00000249: R0 := mem[R0 + 0xFD0, el]:u64 +0000024f: when R0 = 0 goto %0000024d +0000074a: goto %000004fb + +0000024d: +00000255: call R30 with noreturn + +000004fb: +000004fe: goto @__gmon_start__ + +000004fc: +000005c9: R16 := 0x11000 +000005d0: R17 := mem[R16 + 0x10, el]:u64 +000005d6: R16 := R16 + 0x10 +000005db: call R17 with noreturn + +0000074b: sub deregister_tm_clones(deregister_tm_clones_result) +00000760: deregister_tm_clones_result :: out u32 = low:32[R0] + +0000025b: +0000025e: R0 := 0x11000 +00000264: R0 := R0 + 
0x30 +00000269: R1 := 0x11000 +0000026f: R1 := R1 + 0x30 +00000275: #1 := ~R0 +0000027a: #2 := R1 + ~R0 +00000280: VF := extend:65[#2 + 1] <> extend:65[R1] + extend:65[#1] + 1 +00000286: CF := pad:65[#2 + 1] <> pad:65[R1] + pad:65[#1] + 1 +0000028a: ZF := #2 + 1 = 0 +0000028e: NF := 63:63[#2 + 1] +00000294: when ZF goto %00000292 +0000074c: goto %000004dd + +000004dd: +000004e0: R1 := 0x10000 +000004e7: R1 := mem[R1 + 0xFC0, el]:u64 +000004ec: when R1 = 0 goto %00000292 +0000074d: goto %000004f0 + +00000292: +0000029a: call R30 with noreturn + +000004f0: +000004f4: R16 := R1 +000004f9: call R16 with noreturn + +0000074e: sub frame_dummy(frame_dummy_result) +00000761: frame_dummy_result :: out u32 = low:32[R0] + +00000332: +00000334: call @register_tm_clones with noreturn + +0000074f: sub main(main_argc, main_argv, main_result) +00000762: main_argc :: in u32 = low:32[R0] +00000763: main_argv :: in out u64 = R1 +00000764: main_result :: out u32 = low:32[R0] + +000003b0: +000003b4: R31 := R31 - 0x10 +000003ba: R9 := pad:64[31:0[R0] + 4] +000003c1: R11 := pad:64[31:0[R0] + (29:0[R0].0)] +000003c6: R8 := 6 +000003cc: R10 := R31 +000003d2: R13 := extend:64[31:0[R0].0] +000003d9: R8 := pad:64[31:0[R8] + ~31:0[R0] + 1] +000003df: R12 := R10 + 8 +000003e5: #6 := 31:0[R0] - 4 +000003ea: VF := extend:33[#6 + 1] <> extend:33[31:0[R0]] - 3 +000003ef: CF := pad:33[#6 + 1] <> pad:33[31:0[R0]] + 0xFFFFFFFD +000003f3: ZF := #6 + 1 = 0 +000003f7: NF := 31:31[#6 + 1] +000003fe: R10 := R10 + R13 +00000406: mem := mem with [R31, el]:u32 <- 31:0[R9] +0000040c: mem := mem with [R31 + 4, el]:u32 <- 31:0[R11] +0000041b: when NF <> VF goto %00000413 +0000041c: goto %00000417 + +00000417: +00000418: R9 := R12 +0000041e: goto %0000041a + +00000413: +00000414: R9 := R10 +0000041d: goto %0000041a + +0000041a: +00000423: R13 := 0x5A +00000432: when NF <> VF goto %0000042a +00000433: goto %0000042e + +0000042e: +0000042f: R10 := R12 +00000435: goto %00000431 + +0000042a: +0000042b: R10 := R10 +00000434: goto %00000431 + +00000431: +0000043b: #7 := R31 + 8 +00000441: mem := mem with [#7, el]:u32 <- 31:0[R8] +00000446: mem := mem with [#7 + 4, el]:u32 <- 0 +0000044d: R9 := pad:64[mem[R9, el]:u32] +00000455: mem := mem with [R31, el]:u32 <- 31:0[R11] +0000045b: mem := mem with [R31 + 4, el]:u32 <- 31:0[R8] +00000463: mem := mem with [R31 + 8, el]:u64 <- R13 +0000046a: R8 := pad:64[mem[R10, el]:u32] +00000471: R8 := pad:64[31:0[R9] + 31:0[R8]] +00000477: R0 := pad:64[31:0[R8] + 2] +0000047d: R31 := R31 + 0x10 +00000482: call R30 with noreturn + +00000750: sub register_tm_clones(register_tm_clones_result) +00000765: register_tm_clones_result :: out u32 = low:32[R0] + +0000029c: +0000029f: R0 := 0x11000 +000002a5: R0 := R0 + 0x30 +000002aa: R1 := 0x11000 +000002b0: R1 := R1 + 0x30 +000002b7: R1 := R1 + ~R0 + 1 +000002bd: R2 := 0.63:63[R1] +000002c4: R1 := R2 + (R1 ~>> 3) +000002ca: R1 := extend:64[63:1[R1]] +000002d0: when R1 = 0 goto %000002ce +00000751: goto %000004bf + +000004bf: +000004c2: R2 := 0x10000 +000004c9: R2 := mem[R2 + 0xFE0, el]:u64 +000004ce: when R2 = 0 goto %000002ce +00000752: goto %000004d2 + +000002ce: +000002d6: call R30 with noreturn + +000004d2: +000004d6: R16 := R2 +000004db: call R16 with noreturn + +00000753: sub test(test_result) +00000766: test_result :: out u32 = low:32[R0] + +00000336: +0000033a: R31 := R31 - 0x10 +00000340: R8 := R31 +00000346: #4 := 31:0[R3] - 4 +0000034b: VF := extend:33[#4 + 1] <> extend:33[31:0[R3]] - 3 +00000350: CF := pad:33[#4 + 1] <> pad:33[31:0[R3]] + 0xFFFFFFFD 
+00000354: ZF := #4 + 1 = 0 +00000358: NF := 31:31[#4 + 1] +0000035f: R9 := R8 + extend:64[31:0[R3].0] +00000365: R8 := R8 + 8 +00000374: when NF <> VF goto %0000036c +00000375: goto %00000370 + +00000370: +00000371: R8 := R8 +00000377: goto %00000373 + +0000036c: +0000036d: R8 := R9 +00000376: goto %00000373 + +00000373: +0000037f: mem := mem with [R31, el]:u32 <- 31:0[R0] +00000385: mem := mem with [R31 + 4, el]:u32 <- 31:0[R1] +0000038b: #5 := R31 + 8 +00000391: mem := mem with [#5, el]:u32 <- 31:0[R2] +00000396: mem := mem with [#5 + 4, el]:u32 <- 0 +0000039d: R8 := pad:64[mem[R8, el]:u32] +000003a3: R0 := pad:64[31:0[R8] + 1] +000003a9: R31 := R31 + 0x10 +000003ae: call R30 with noreturn diff --git a/examples/stackambiguity/stackambiguity.c b/examples/stackambiguity/stackambiguity.c new file mode 100644 index 000000000..ed765bc30 --- /dev/null +++ b/examples/stackambiguity/stackambiguity.c @@ -0,0 +1,19 @@ +int test(int a, int b, int c, int d) { + int q = 0; + int x[4] = {a, b, c}; + int* y = &x[2]; + if (d < 3) { + y = &x[d]; + } + q = *y + 1; + return q; +} + +int main(int argc, char** argv) { + int a = 4 + argc; + int b = 5 * argc; + int c = 6 - argc; + int d = test(a, b, c, argc); + int e = test(b, c, 90, argc); + return d + e; +} \ No newline at end of file diff --git a/examples/stackambiguity/stackambiguity.relf b/examples/stackambiguity/stackambiguity.relf new file mode 100644 index 000000000..db606f74a --- /dev/null +++ b/examples/stackambiguity/stackambiguity.relf @@ -0,0 +1,122 @@ + +Relocation section '.rela.dyn' at offset 0x460 contains 8 entries: + Offset Info Type Symbol's Value Symbol's Name + Addend +0000000000010dc8 0000000000000403 R_AARCH64_RELATIVE 710 +0000000000010dd0 0000000000000403 R_AARCH64_RELATIVE 6c0 +0000000000010fd8 0000000000000403 R_AARCH64_RELATIVE 744 +0000000000011028 0000000000000403 R_AARCH64_RELATIVE 11028 +0000000000010fc0 0000000400000401 R_AARCH64_GLOB_DAT 0000000000000000 _ITM_deregisterTMCloneTable + 0 +0000000000010fc8 0000000500000401 R_AARCH64_GLOB_DAT 0000000000000000 __cxa_finalize@GLIBC_2.17 + 0 +0000000000010fd0 0000000600000401 R_AARCH64_GLOB_DAT 0000000000000000 __gmon_start__ + 0 +0000000000010fe0 0000000800000401 R_AARCH64_GLOB_DAT 0000000000000000 _ITM_registerTMCloneTable + 0 + +Relocation section '.rela.plt' at offset 0x520 contains 4 entries: + Offset Info Type Symbol's Value Symbol's Name + Addend +0000000000011000 0000000300000402 R_AARCH64_JUMP_SLOT 0000000000000000 __libc_start_main@GLIBC_2.34 + 0 +0000000000011008 0000000500000402 R_AARCH64_JUMP_SLOT 0000000000000000 __cxa_finalize@GLIBC_2.17 + 0 +0000000000011010 0000000600000402 R_AARCH64_JUMP_SLOT 0000000000000000 __gmon_start__ + 0 +0000000000011018 0000000700000402 R_AARCH64_JUMP_SLOT 0000000000000000 abort@GLIBC_2.17 + 0 + +Symbol table '.dynsym' contains 9 entries: + Num: Value Size Type Bind Vis Ndx Name + 0: 0000000000000000 0 NOTYPE LOCAL DEFAULT UND + 1: 0000000000000580 0 SECTION LOCAL DEFAULT 11 .init + 2: 0000000000011020 0 SECTION LOCAL DEFAULT 23 .data + 3: 0000000000000000 0 FUNC GLOBAL DEFAULT UND __libc_start_main@GLIBC_2.34 (2) + 4: 0000000000000000 0 NOTYPE WEAK DEFAULT UND _ITM_deregisterTMCloneTable + 5: 0000000000000000 0 FUNC WEAK DEFAULT UND __cxa_finalize@GLIBC_2.17 (3) + 6: 0000000000000000 0 NOTYPE WEAK DEFAULT UND __gmon_start__ + 7: 0000000000000000 0 FUNC GLOBAL DEFAULT UND abort@GLIBC_2.17 (3) + 8: 0000000000000000 0 NOTYPE WEAK DEFAULT UND _ITM_registerTMCloneTable + +Symbol table '.symtab' contains 89 entries: + Num: Value Size Type Bind 
Vis Ndx Name + 0: 0000000000000000 0 NOTYPE LOCAL DEFAULT UND + 1: 0000000000000238 0 SECTION LOCAL DEFAULT 1 .interp + 2: 0000000000000254 0 SECTION LOCAL DEFAULT 2 .note.gnu.build-id + 3: 0000000000000278 0 SECTION LOCAL DEFAULT 3 .note.ABI-tag + 4: 0000000000000298 0 SECTION LOCAL DEFAULT 4 .gnu.hash + 5: 00000000000002b8 0 SECTION LOCAL DEFAULT 5 .dynsym + 6: 0000000000000390 0 SECTION LOCAL DEFAULT 6 .dynstr + 7: 000000000000041e 0 SECTION LOCAL DEFAULT 7 .gnu.version + 8: 0000000000000430 0 SECTION LOCAL DEFAULT 8 .gnu.version_r + 9: 0000000000000460 0 SECTION LOCAL DEFAULT 9 .rela.dyn + 10: 0000000000000520 0 SECTION LOCAL DEFAULT 10 .rela.plt + 11: 0000000000000580 0 SECTION LOCAL DEFAULT 11 .init + 12: 00000000000005a0 0 SECTION LOCAL DEFAULT 12 .plt + 13: 0000000000000600 0 SECTION LOCAL DEFAULT 13 .text + 14: 00000000000007a0 0 SECTION LOCAL DEFAULT 14 .fini + 15: 00000000000007b4 0 SECTION LOCAL DEFAULT 15 .rodata + 16: 00000000000007b8 0 SECTION LOCAL DEFAULT 16 .eh_frame_hdr + 17: 0000000000000800 0 SECTION LOCAL DEFAULT 17 .eh_frame + 18: 0000000000010dc8 0 SECTION LOCAL DEFAULT 18 .init_array + 19: 0000000000010dd0 0 SECTION LOCAL DEFAULT 19 .fini_array + 20: 0000000000010dd8 0 SECTION LOCAL DEFAULT 20 .dynamic + 21: 0000000000010fb8 0 SECTION LOCAL DEFAULT 21 .got + 22: 0000000000010fe8 0 SECTION LOCAL DEFAULT 22 .got.plt + 23: 0000000000011020 0 SECTION LOCAL DEFAULT 23 .data + 24: 0000000000011030 0 SECTION LOCAL DEFAULT 24 .bss + 25: 0000000000000000 0 SECTION LOCAL DEFAULT 25 .comment + 26: 0000000000000000 0 FILE LOCAL DEFAULT ABS Scrt1.o + 27: 0000000000000278 0 NOTYPE LOCAL DEFAULT 3 $d + 28: 0000000000000278 32 OBJECT LOCAL DEFAULT 3 __abi_tag + 29: 0000000000000600 0 NOTYPE LOCAL DEFAULT 13 $x + 30: 0000000000000814 0 NOTYPE LOCAL DEFAULT 17 $d + 31: 00000000000007b4 0 NOTYPE LOCAL DEFAULT 15 $d + 32: 0000000000000000 0 FILE LOCAL DEFAULT ABS crti.o + 33: 0000000000000634 0 NOTYPE LOCAL DEFAULT 13 $x + 34: 0000000000000634 20 FUNC LOCAL DEFAULT 13 call_weak_fn + 35: 0000000000000580 0 NOTYPE LOCAL DEFAULT 11 $x + 36: 00000000000007a0 0 NOTYPE LOCAL DEFAULT 14 $x + 37: 0000000000000000 0 FILE LOCAL DEFAULT ABS crtn.o + 38: 0000000000000590 0 NOTYPE LOCAL DEFAULT 11 $x + 39: 00000000000007ac 0 NOTYPE LOCAL DEFAULT 14 $x + 40: 0000000000000000 0 FILE LOCAL DEFAULT ABS crtstuff.c + 41: 0000000000000650 0 NOTYPE LOCAL DEFAULT 13 $x + 42: 0000000000000650 0 FUNC LOCAL DEFAULT 13 deregister_tm_clones + 43: 0000000000000680 0 FUNC LOCAL DEFAULT 13 register_tm_clones + 44: 0000000000011028 0 NOTYPE LOCAL DEFAULT 23 $d + 45: 00000000000006c0 0 FUNC LOCAL DEFAULT 13 __do_global_dtors_aux + 46: 0000000000011030 1 OBJECT LOCAL DEFAULT 24 completed.0 + 47: 0000000000010dd0 0 NOTYPE LOCAL DEFAULT 19 $d + 48: 0000000000010dd0 0 OBJECT LOCAL DEFAULT 19 __do_global_dtors_aux_fini_array_entry + 49: 0000000000000710 0 FUNC LOCAL DEFAULT 13 frame_dummy + 50: 0000000000010dc8 0 NOTYPE LOCAL DEFAULT 18 $d + 51: 0000000000010dc8 0 OBJECT LOCAL DEFAULT 18 __frame_dummy_init_array_entry + 52: 0000000000000828 0 NOTYPE LOCAL DEFAULT 17 $d + 53: 0000000000011030 0 NOTYPE LOCAL DEFAULT 24 $d + 54: 0000000000000000 0 FILE LOCAL DEFAULT ABS stackambiguity.c + 55: 0000000000000714 0 NOTYPE LOCAL DEFAULT 13 $x.0 + 56: 000000000000002a 0 NOTYPE LOCAL DEFAULT 25 $d.1 + 57: 0000000000000888 0 NOTYPE LOCAL DEFAULT 17 $d.2 + 58: 0000000000000000 0 FILE LOCAL DEFAULT ABS crtstuff.c + 59: 00000000000008cc 0 NOTYPE LOCAL DEFAULT 17 $d + 60: 00000000000008cc 0 OBJECT LOCAL DEFAULT 17 __FRAME_END__ + 61: 
0000000000000000 0 FILE LOCAL DEFAULT ABS + 62: 0000000000010dd8 0 OBJECT LOCAL DEFAULT ABS _DYNAMIC + 63: 00000000000007b8 0 NOTYPE LOCAL DEFAULT 16 __GNU_EH_FRAME_HDR + 64: 0000000000010fb8 0 OBJECT LOCAL DEFAULT ABS _GLOBAL_OFFSET_TABLE_ + 65: 00000000000005a0 0 NOTYPE LOCAL DEFAULT 12 $x + 66: 0000000000000000 0 FUNC GLOBAL DEFAULT UND __libc_start_main@GLIBC_2.34 + 67: 0000000000000000 0 NOTYPE WEAK DEFAULT UND _ITM_deregisterTMCloneTable + 68: 0000000000011020 0 NOTYPE WEAK DEFAULT 23 data_start + 69: 0000000000011030 0 NOTYPE GLOBAL DEFAULT 24 __bss_start__ + 70: 0000000000000000 0 FUNC WEAK DEFAULT UND __cxa_finalize@GLIBC_2.17 + 71: 0000000000011038 0 NOTYPE GLOBAL DEFAULT 24 _bss_end__ + 72: 0000000000011030 0 NOTYPE GLOBAL DEFAULT 23 _edata + 73: 00000000000007a0 0 FUNC GLOBAL HIDDEN 14 _fini + 74: 0000000000011038 0 NOTYPE GLOBAL DEFAULT 24 __bss_end__ + 75: 0000000000011020 0 NOTYPE GLOBAL DEFAULT 23 __data_start + 76: 0000000000000000 0 NOTYPE WEAK DEFAULT UND __gmon_start__ + 77: 0000000000011028 0 OBJECT GLOBAL HIDDEN 23 __dso_handle + 78: 0000000000000000 0 FUNC GLOBAL DEFAULT UND abort@GLIBC_2.17 + 79: 00000000000007b4 4 OBJECT GLOBAL DEFAULT 15 _IO_stdin_used + 80: 0000000000011038 0 NOTYPE GLOBAL DEFAULT 24 _end + 81: 0000000000000600 52 FUNC GLOBAL DEFAULT 13 _start + 82: 0000000000011038 0 NOTYPE GLOBAL DEFAULT 24 __end__ + 83: 0000000000011030 0 NOTYPE GLOBAL DEFAULT 24 __bss_start + 84: 0000000000000744 92 FUNC GLOBAL DEFAULT 13 main + 85: 0000000000011030 0 OBJECT GLOBAL HIDDEN 23 __TMC_END__ + 86: 0000000000000000 0 NOTYPE WEAK DEFAULT UND _ITM_registerTMCloneTable + 87: 0000000000000580 0 FUNC GLOBAL HIDDEN 11 _init + 88: 0000000000000714 48 FUNC GLOBAL DEFAULT 13 test From 9bc70df38537de7dc86e1360276cd3af354ed441 Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 30 Sep 2024 15:59:10 +1000 Subject: [PATCH 068/104] removed the unambiguous stack assumption --- src/main/scala/analysis/DSAUtility.scala | 114 +++++++++++++---------- src/main/scala/analysis/LocalDSA.scala | 68 +++++++++----- 2 files changed, 109 insertions(+), 73 deletions(-) diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index f0863f134..627bb02c3 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -4,6 +4,7 @@ import analysis.solvers.{DSAUniTerm, DSAUnionFindSolver, UnionFindSolver, Var} import cfg_visualiser.{DotStruct, DotStructElement, StructArrow, StructDotGraph} import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, IntraProcIRCursor, Literal, Memory, MemoryAssign, MemoryLoad, Procedure, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, begin, computeDomain, toShortString} import specification.{ExternalFunction, SpecGlobal, SymbolTableEntry} +import util.Logger import scala.util.control.Breaks.{break, breakable} import scala.collection.mutable @@ -51,70 +52,87 @@ class DSG(val proc: Procedure, val mallocRegister = Register("R0", 64) val stackPointer = Register("R31", 64) - // this is the mapping from offsets/positions on the stack to their representative DS nodes - val stackMapping: mutable.Map[BigInt, DSN] = - computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).foldLeft(Map[BigInt, DSN]()) { - (results, pos) => stackBuilder(pos, results) - }.to(collection.mutable.Map) - + // collect all stack access and their maximum accessed size + // BigInt is the offset of the stack position and Int is it's size + 
val stackAccesses: Map[BigInt, Int] = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).foldLeft(Map[BigInt, Int]()) { + (results, pos) => + pos match + case Assign(variable: Variable, expr: Expr, _) => + expr match + case MemoryLoad(mem, index, endian, size) => + visitStackAccess(pos, index, size).foldLeft(results) { + (res, access) => + if !res.contains(access.offset) || (res.getOrElse(access.offset, -1) < access.size) then + res + (access.offset -> access.size) + else + res + } + case _ => + visitStackAccess(pos, expr, 0).foldLeft(results) { + (res, access) => + if !res.contains(access.offset) || (res.getOrElse(access.offset, -1) < access.size) then + res + (access.offset -> access.size) + else + res + } + case MemoryAssign(mem, index: Expr, value: Expr, endian, size: Int, label) => + visitStackAccess(pos, index, size).foldLeft(results) { + (res, access) => + if !res.contains(access.offset) || (res.getOrElse(access.offset, -1) < access.size) then + res + (access.offset -> access.size) + else + res + } + case _ => results + } - /** - * this function takes a stackMapping and updates it based on a memory load and memory store - * @param pos memory load or store IL position - * @param index memory location of load or store - * @param size size of the load or store - * @param m stack mapping - * @return updated stack mapping - */ - private def visitStackAccess(pos: CFGPosition, index: Expr, size: Int, m: Map[BigInt, DSN]) : Map[BigInt, DSN] = - assert(size % 8 == 0) + case class StackAccess(offset: BigInt, size: Int) + private def visitStackAccess(pos: CFGPosition, index: Expr, size: Int): Set[StackAccess] = { + assert( size % 8 == 0) val byteSize = size / 8 index match case BinaryExpr(op, arg1: Variable, arg2) if varToSym.contains(pos) && varToSym(pos).contains(arg1) && evaluateExpression(arg2, constProp(pos)).isDefined => - var offset = evaluateExpression(arg2, constProp(pos)).get.value - varToSym(pos)(arg1).foldLeft(m) { // go through all the symbolic accesses tied to arg1 at pos + val offset = evaluateExpression(arg2, constProp(pos)).get.value + varToSym(pos)(arg1).foldLeft(Set[StackAccess]()) { // go through all the symbolic accesses tied to arg1 at pos (m, sym) => sym match case SymbolicAddress(accessor, StackLocation(regionIdentifier, proc, size), symOffset) => // only consider stack accesses - offset = offset + symOffset - createStackMapping(pos.toShortString, offset, m, byteSize) + m + StackAccess(offset + symOffset, byteSize) case _ => m } case arg: Variable if varToSym.contains(pos) && varToSym(pos).contains(arg) => - varToSym(pos)(arg).foldLeft(m) { + varToSym(pos)(arg).foldLeft(Set[StackAccess]()) { (m, sym) => sym match case SymbolicAddress(accessor, StackLocation(regionIdentifier, proc, size), offset) => - createStackMapping(pos.toShortString, offset, m, byteSize) +// createStackMapping(pos.toShortString, offset, m, byteSize) + m + StackAccess(offset, byteSize) case _ => m } - case _ => m - - private def stackBuilder(pos: CFGPosition, m: Map[BigInt, DSN]): Map[BigInt, DSN] = { - pos match - case Assign(variable: Variable, expr: Expr, _) => - expr match - case MemoryLoad(mem, index, endian, size) => - visitStackAccess(pos, index, size, m) - case _ => m - case MemoryAssign(mem, index: Expr, value: Expr, endian, size: Int, label) => - visitStackAccess(pos, index, size, m) - case _ => m - + case _ => Set.empty } - private def createStackMapping(label: String, offset: BigInt, m: Map[BigInt, DSN], byteSize: Int) : Map[BigInt, DSN]= - if m.contains(offset) 
then - assert(!m(offset).cells(0).growSize(byteSize)) - m - else - val node = DSN(Some(this), byteSize) - node.allocationRegions.add(StackLocation(label, proc, byteSize)) - node.flags.stack = true - node.addCell(0, byteSize) - m + (offset -> node) + // this is the mapping from offsets/positions on the stack to their representative DS nodes + val stackMapping: mutable.Map[BigInt, DSN] = mutable.Map() + var lastOffset: BigInt = -1 + var nextValidOffset: BigInt = 0 + stackAccesses.keys.toSeq.sorted.foreach( + offset => + val byteSize = stackAccesses(offset) + if offset >= nextValidOffset then + val node = DSN(Some(this), byteSize) + node.allocationRegions.add(StackLocation(s"Stack_${proc}_${offset}", proc, byteSize)) + node.flags.stack = true + node.addCell(0, byteSize) + stackMapping.update(offset, node) + lastOffset = offset + else + val diff = nextValidOffset - offset + stackMapping(lastOffset).addCell(diff, byteSize) + nextValidOffset = offset + byteSize + ) private val swappedOffsets = globalOffsets.map(_.swap) @@ -171,7 +189,6 @@ class DSG(val proc: Procedure, ) - // determine if an address is a global and return the corresponding global if it is. def isGlobal(address: BigInt): Option[DSAGlobal] = var global: Option[DSAGlobal] = None @@ -806,8 +823,9 @@ class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCount } result match case Some(value) => value - case None => - ??? + case None => ??? +// Logger.warn(s"$this didn't have a cell at offset: $offset. An empty cell was added in") +// addCell(offset, 0) else cells(offset) diff --git a/src/main/scala/analysis/LocalDSA.scala b/src/main/scala/analysis/LocalDSA.scala index 053e4ccd1..2efb6e549 100644 --- a/src/main/scala/analysis/LocalDSA.scala +++ b/src/main/scala/analysis/LocalDSA.scala @@ -47,10 +47,16 @@ class LocalDSA( (m, access) => if m.contains(access._1.accessor) then // every variable pointing to a stack region ONLY has one symbolic access associated with it. 
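// --- Illustrative sketch, hypothetical names; not the LocalDSA API ---
// With the single-stack-access assumption noted above dropped by this patch, a variable
// such as `y` in examples/stackambiguity/stackambiguity.c can carry several candidate
// stack offsets (&x[2] on one path, &x[d] on the other). Rather than asserting there is
// exactly one, the candidates are folded together with a merge, in the same shape as the
// reworked isStack further below.
object AmbiguousStackSketch {
  // stand-in for a DS cell: just the set of stack offsets it currently covers
  final case class Cell(offsets: Set[BigInt])
  private def cellFor(offset: BigInt): Cell = Cell(Set(offset))
  private def merge(a: Cell, b: Cell): Cell = Cell(a.offsets ++ b.offsets)

  // fold all candidate offsets for one variable into a single merged cell
  def resolve(candidates: Set[BigInt]): Option[Cell] =
    if (candidates.isEmpty) None
    else {
      val sorted = candidates.toSeq.sorted
      Some(sorted.tail.foldLeft(cellFor(sorted.head))((acc, o) => merge(acc, cellFor(o))))
    }

  def main(args: Array[String]): Unit = {
    // e.g. y -> {offset 0 for x[d], offset 8 for x[2]} on the two paths through test()
    println(resolve(Set(BigInt(0), BigInt(8)))) // Some(Cell(Set(0, 8)))
  }
}
// --- end sketch ---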
- m(access._1.accessor).foreach( - sym => assert(!sym.symbolicBase.isInstanceOf[StackLocation]) - ) - assert(!access._1.symbolicBase.isInstanceOf[StackLocation]) +// m(access._1.accessor).foreach( +// sym => +// if (sym.symbolicBase.isInstanceOf[StackLocation]) then +// println(m) +// println(access._1.accessor) +// println(access) +// print("") +// //assert(!sym.symbolicBase.isInstanceOf[StackLocation]) +// ) +// assert(!access._1.symbolicBase.isInstanceOf[StackLocation]) m + (access._1.accessor -> (m(access._1.accessor) + access._1)) else m + (access._1.accessor -> Set(access._1)) @@ -58,6 +64,25 @@ class LocalDSA( outerMap + (position -> innerMap) } + private def getStack(offset: BigInt): DSC = + var last: BigInt = 0 + if graph.stackMapping.contains(offset) then + graph.stackMapping(offset).cells(0) + else + breakable { + graph.stackMapping.keys.foreach( + elementOffset => + if offset < elementOffset then + break + else + last = elementOffset + ) + } + val diff = offset - last + assert(graph.stackMapping.contains(last)) + graph.stackMapping(last).getCell(diff) + + /** * if an expr is the address of a stack location return its corresponding cell @@ -66,22 +91,24 @@ class LocalDSA( def isStack(expr: Expr, pos: CFGPosition): Option[DSC] = expr match case BinaryExpr(op, arg1: Variable, arg2) if varToSym.contains(pos) && varToSym(pos).contains(arg1) && - varToSym(pos)(arg1).size == 1 && varToSym(pos)(arg1).head.symbolicBase.isInstanceOf[StackLocation] => + varToSym(pos)(arg1).exists(s => s.symbolicBase.isInstanceOf[StackLocation]) => evaluateExpression(arg2, constProp(pos)) match case Some(v) => - val offset = v.value + varToSym(pos)(arg1).head.offset - if graph.stackMapping.contains(offset) then - Some(graph.stackMapping(offset).cells(0)) - else - None + val stackRegions = varToSym(pos)(arg1).filter(s => s.symbolicBase.isInstanceOf[StackLocation]) + val res = stackRegions.tail.foldLeft(getStack(v.value + stackRegions.head.offset)) { + (res, sym) => + graph.mergeCells(res, getStack(v.value + sym.offset)) + } + Some(res) case None => None case arg: Variable if varToSym.contains(pos) && varToSym(pos).contains(arg) && - varToSym(pos)(arg).size == 1 && varToSym(pos)(arg).head.symbolicBase.isInstanceOf[StackLocation] => - val offset = varToSym(pos)(arg).head.offset - if graph.stackMapping.contains(offset) then - Some(graph.stackMapping(offset).cells(0)) - else - None + varToSym(pos)(arg).exists(s => s.symbolicBase.isInstanceOf[StackLocation]) => + val stackRegions = varToSym(pos)(arg).filter(s => s.symbolicBase.isInstanceOf[StackLocation]) + val res = stackRegions.tail.foldLeft(getStack(stackRegions.head.offset)) { + (res, sym) => + graph.mergeCells(res, getStack(sym.offset)) + } + Some(res) case _ => None @@ -183,15 +210,6 @@ class LocalDSA( * handles unsupported pointer arithmetic by collapsing all the nodes invloved */ def unsupportedPointerArithmeticOperation(n: CFGPosition, expr: Expr, lhsCell: DSC): DSC = { -// var containsPointer = false -// breakable { -// for (v <- expr.variables) { -// if varToSym.contains(n) && varToSym(n).contains(v) then -// containsPointer = true -// break -// } -// } -// if containsPointer then val cell = expr.variables.foldLeft(lhsCell) { (c, v) => val cells: Set[Slice] = graph.getCells(n, v) From 69792b765df3066a367bab5a83e15f8a02073807 Mon Sep 17 00:00:00 2001 From: l-kent Date: Thu, 10 Oct 2024 11:13:12 +1000 Subject: [PATCH 069/104] fix IDESolver issues with empty procedures --- .../scala/analysis/solvers/IDESolver.scala | 15 ++++++++----- 
src/main/scala/util/RunUtils.scala | 22 +++---------------- 2 files changed, 13 insertions(+), 24 deletions(-) diff --git a/src/main/scala/analysis/solvers/IDESolver.scala b/src/main/scala/analysis/solvers/IDESolver.scala index 5e94726d9..c057b98f0 100644 --- a/src/main/scala/analysis/solvers/IDESolver.scala +++ b/src/main/scala/analysis/solvers/IDESolver.scala @@ -199,10 +199,15 @@ abstract class IDESolver[E <: Procedure | Command, EE <: Procedure | Command, C } def analyze(): Map[CFGPosition, Map[D, T]] = { - val phase1 = Phase1(program) - phase1.analyze() - val phase2 = Phase2(program, phase1) - phase2.restructure(phase2.analyze()) + if (program.mainProcedure.blocks.nonEmpty && program.mainProcedure.returnBlock.isDefined && program.mainProcedure.entryBlock.isDefined) { + val phase1 = Phase1(program) + phase1.analyze() + val phase2 = Phase2(program, phase1) + phase2.restructure(phase2.analyze()) + } else { + Logger.warn(s"Disabling IDE solver tests due to external main procedure: ${program.mainProcedure.name}") + Map() + } } } @@ -244,7 +249,7 @@ abstract class ForwardIDESolver[D, T, L <: Lattice[T]](program: Program) abstract class BackwardIDESolver[D, T, L <: Lattice[T]](program: Program) - extends IDESolver[Return, Procedure, Command, DirectCall, D, T, L](program, IRWalk.lastInProc(program.mainProcedure).get), + extends IDESolver[Return, Procedure, Command, DirectCall, D, T, L](program, IRWalk.lastInProc(program.mainProcedure).getOrElse(program.mainProcedure)), BackwardIDEAnalysis[D, T, L], IRInterproceduralBackwardDependencies { protected def entryToExit(entry: Return): Procedure = IRWalk.procedure(entry) diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 303e11885..542b58b83 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -417,20 +417,8 @@ object StaticAnalysis { val vsaSolver = ValueSetAnalysisSolver(IRProgram, globalAddresses, externalAddresses, globalOffsets, subroutines, mmm, constPropResult) val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = vsaSolver.analyze() - - var paramResults: Map[Procedure, Set[Variable]] = Map.empty - var interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]] = Map.empty - - if (IRProgram.mainProcedure.blocks.nonEmpty && IRProgram.mainProcedure.returnBlock.isDefined && IRProgram.mainProcedure.entryBlock.isDefined) { - Logger.debug("[!] Running Interprocedural Live Variables Analysis") - interLiveVarsResults = InterLiveVarsAnalysis(IRProgram).analyze() - - Logger.debug("[!] Running Parameter Analysis") - paramResults = ParamAnalysis(IRProgram).analyze() - - } else { - Logger.warn(s"Disabling IDE solver tests due to external main procedure: ${IRProgram.mainProcedure.name}") - } + val paramResults: Map[Procedure, Set[Variable]] = ParamAnalysis(IRProgram).analyze() + val interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]] = InterLiveVarsAnalysis(IRProgram).analyze() StaticAnalysisContext( constPropResult = constPropResult, @@ -598,18 +586,14 @@ object RunUtils { }), s"${s}_saa.dot") ) - Logger.debug("[!] Running Parameter Analysis") - val paramResults = ParamAnalysis(ctx.program).analyze() - Logger.debug("[!] 
Running DSA Analysis") val symbolTableEntries: Set[SymbolTableEntry] = ctx.globals ++ ctx.funcEntries - val dsa = DSA(ctx.program, symResults, analysisResult.last.IRconstPropResult, symbolTableEntries, ctx.globalOffsets, ctx.externalFunctions, reachingDefs, writesTo, paramResults) + val dsa = DSA(ctx.program, symResults, analysisResult.last.IRconstPropResult, symbolTableEntries, ctx.globalOffsets, ctx.externalFunctions, reachingDefs, writesTo, analysisResult.last.paramResults) dsa.analyze() assert(invariant.singleCallBlockEnd(ctx.program)) Logger.debug(s"[!] Finished indirect call resolution after $iteration iterations") analysisResult.last.copy( - paramResults = paramResults, SymbolicAddressess = symResults, locals = Some(dsa.locals.toMap), bus = Some(dsa.bu.toMap), From 4592fd342d141cf25529500be7fb7413c8fde34f Mon Sep 17 00:00:00 2001 From: l-kent Date: Mon, 14 Oct 2024 10:57:16 +1000 Subject: [PATCH 070/104] general cleanup of DSA, particularly around overuse of tuple accesses which are difficult to follow at a glance --- src/main/scala/analysis/DSA.scala | 276 +++--- src/main/scala/analysis/DSAUtility.scala | 888 +++++++++--------- src/main/scala/analysis/LocalDSA.scala | 222 +++-- .../ReachingDefinitionsAnalysis.scala | 47 +- .../analysis/SymbolicAddressAnalysis.scala | 18 +- .../analysis/solvers/DSAUnionFindSolver.scala | 41 +- .../analysis/solvers/UnionFindSolver.scala | 2 +- src/test/scala/DSATest.scala | 56 +- 8 files changed, 735 insertions(+), 815 deletions(-) diff --git a/src/main/scala/analysis/DSA.scala b/src/main/scala/analysis/DSA.scala index 23c831618..3078ba66c 100644 --- a/src/main/scala/analysis/DSA.scala +++ b/src/main/scala/analysis/DSA.scala @@ -26,62 +26,59 @@ class DSA(program: Program, externalFunctions: Set[ExternalFunction], reachingDefs: Map[CFGPosition, Map[Variable, Set[CFGPosition]]], writesTo: Map[Procedure, Set[Register]], - params: Map[Procedure, Set[Variable]] + params: Map[Procedure, Set[Variable]] ) extends Analysis[Map[Procedure, DSG]] { - val locals : mutable.Map[Procedure, DSG] = mutable.Map() + val locals: mutable.Map[Procedure, DSG] = mutable.Map() val bu: mutable.Map[Procedure, DSG] = mutable.Map() val td: mutable.Map[Procedure, DSG] = mutable.Map() - val stackPointer = Register("R31", 64) - val returnPointer = Register("R30", 64) - val framePointer = Register("R29", 64) + private val stackPointer = Register("R31", 64) + private val returnPointer = Register("R30", 64) + private val framePointer = Register("R29", 64) - val ignoreRegisters: Set[Variable] = Set(stackPointer, returnPointer, framePointer) + private val ignoreRegisters: Set[Variable] = Set(stackPointer, returnPointer, framePointer) - def findLeaf(procedure: Procedure): Set[Procedure] = - if CallGraph.succ(procedure).isEmpty then + private def findLeaf(procedure: Procedure): Set[Procedure] = { + val succ = CallGraph.succ(procedure) + if (succ.isEmpty) { Set(procedure) - else - CallGraph.succ(procedure).foldLeft(Set[Procedure]()){ - (s, proc) => s ++ findLeaf(proc) - } - + } else { + succ.flatMap(findLeaf) + } + } - var visited = Set[Procedure]() - val queue = mutable.Queue[Procedure]() + private var visited = Set[Procedure]() + private val queue = mutable.Queue[Procedure]() override def analyze(): Map[Procedure, DSG] = { - var domain : Set[Procedure] = Set(program.mainProcedure) - val stack : mutable.Stack[Procedure] = mutable.Stack() + var domain: Set[Procedure] = Set(program.mainProcedure) + val stack: mutable.Stack[Procedure] = mutable.Stack() 
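// --- Illustrative sketch, standalone with a hypothetical call map; not the DSA types ---
// The worklist loop just below collects every procedure transitively reachable from
// mainProcedure through direct calls; that set becomes the analysis domain for the
// local, bottom-up and top-down phases. A minimal self-contained version:
object ReachableProcsSketch {
  def reachable(calls: Map[String, Set[String]], entry: String): Set[String] = {
    val worklist = scala.collection.mutable.Stack[String]()
    var domain = Set(entry)
    worklist.pushAll(calls.getOrElse(entry, Set.empty))
    while (worklist.nonEmpty) {
      val current = worklist.pop()
      domain += current
      // only enqueue callees that are not already in the domain
      worklist.pushAll(calls.getOrElse(current, Set.empty).diff(domain))
    }
    domain
  }

  def main(args: Array[String]): Unit = {
    val calls = Map("main" -> Set("test"), "test" -> Set.empty[String])
    println(reachable(calls, "main")) // Set(main, test)
  }
}
// --- end sketch ---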
stack.pushAll(program.mainProcedure.calls) // calculate the procedures used in the program - while stack.nonEmpty do + while (stack.nonEmpty) { val current = stack.pop() domain += current stack.pushAll(current.calls.diff(domain)) - + } // perform local analysis on all procs - domain.foreach( - proc => - val dsg = LocalDSA(proc, symResults, constProp, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params).analyze() + domain.foreach { proc => + val dsg = LocalDSA(proc, symResults, constProp, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params).analyze() - locals.update(proc, dsg) - bu.update(proc, dsg.cloneSelf()) - ) + locals.update(proc, dsg) + bu.update(proc, dsg.cloneSelf()) + } - Map() val leafNodes = findLeaf(program.mainProcedure) - leafNodes.foreach( - proc => - assert(locals(proc).callsites.isEmpty) - visited += proc - val preds : Set[Procedure] = CallGraph.pred(proc) - queue.enqueueAll(CallGraph.pred(proc).diff(visited).intersect(domain)) - ) + leafNodes.foreach { proc => + assert(locals(proc).callsites.isEmpty) + visited += proc + //val preds: Set[Procedure] = CallGraph.pred(proc) + queue.enqueueAll(CallGraph.pred(proc).diff(visited).intersect(domain)) + } // bottom up phase while queue.nonEmpty do @@ -92,140 +89,127 @@ class DSA(program: Program, queue.enqueueAll(CallGraph.pred(proc).diff(visited)) val buGraph = bu(proc) - buGraph.callsites.foreach( - callSite => - val callee = callSite.proc - val calleeGraph = locals(callee) //.cloneSelf() - assert(buGraph.globalMapping.keySet.equals(calleeGraph.globalMapping.keySet)) - assert(calleeGraph.formals.keySet.diff(ignoreRegisters).equals(callSite.paramCells.keySet)) - - calleeGraph.globalMapping.foreach { - case (range: AddressRange, Field(node, offset)) => - val newNode = calleeGraph.find(node).node - newNode.cloneNode(calleeGraph, buGraph) + buGraph.callsites.foreach { callSite => + val callee = callSite.proc + val calleeGraph = locals(callee) //.cloneSelf() + assert(buGraph.globalMapping.keySet.equals(calleeGraph.globalMapping.keySet)) + assert(calleeGraph.formals.keySet.diff(ignoreRegisters).equals(callSite.paramCells.keySet)) + calleeGraph.globalMapping.values.foreach { field => + val newNode = calleeGraph.find(field.node).node + newNode.cloneNode(calleeGraph, buGraph) + } + + calleeGraph.formals.foreach { (variable, slice) => + if (!ignoreRegisters.contains(variable)) { + assert(callSite.paramCells.contains(variable)) + val node = calleeGraph.find(slice).node + node.cloneNode(calleeGraph, buGraph) } + } - calleeGraph.formals.foreach { - case (variable: Variable, slice: Slice) if !ignoreRegisters.contains(variable) => - assert(callSite.paramCells.contains(variable)) - val node = calleeGraph.find(slice).node - node.cloneNode(calleeGraph, buGraph) - case _ => + assert(writesTo(callee).equals(callSite.returnCells.keySet)) + writesTo(callee).foreach { reg => + assert(callSite.returnCells.contains(reg)) + val returnCells = calleeGraph.getCells(IRWalk.lastInProc(callee).get, reg).map(calleeGraph.find) + assert(returnCells.nonEmpty) + returnCells.foreach { slice => + val node = calleeGraph.find(slice).node + node.cloneNode(calleeGraph, buGraph) } + } + + // assert(calleeGraph.formals.isEmpty || buGraph.varToCell(begin(callee)).equals(calleeGraph.formals)) + calleeGraph.globalMapping.foreach { + case (range: AddressRange, Field(node: DSN, offset: BigInt)) => + val field = calleeGraph.find(node) + buGraph.mergeCells( + buGraph.globalMapping(range).node.getCell(buGraph.globalMapping(range).offset), 
+ field.node.getCell(field.offset + offset) + ) + } - assert(writesTo(callee).equals(callSite.returnCells.keySet)) - writesTo(callee).foreach( - reg => - assert(callSite.returnCells.contains(reg)) - val returnCells = calleeGraph.getCells(IRWalk.lastInProc(callee).get, reg).map(calleeGraph.find) - assert(returnCells.nonEmpty) - returnCells.foreach{ - case slice: Slice => - val node = calleeGraph.find(slice).node - node.cloneNode(calleeGraph, buGraph) - } - ) - - // assert(calleeGraph.formals.isEmpty || buGraph.varToCell(begin(callee)).equals(calleeGraph.formals)) - val globalNodes: mutable.Map[Int, DSN] = mutable.Map() - calleeGraph.globalMapping.foreach { - case (range: AddressRange, Field(node: DSN, offset: BigInt)) => - val field = calleeGraph.find(node) - buGraph.mergeCells(buGraph.globalMapping(range)._1.getCell(buGraph.globalMapping(range)._2), - field.node.getCell(field.offset + offset)) + if (buGraph.varToCell.contains(callee)) { + buGraph.varToCell(callee).keys.foreach { variable => + if (!ignoreRegisters.contains(variable)) { + val formal = buGraph.varToCell(callee)(variable) + buGraph.mergeCells(buGraph.adjust(formal), buGraph.adjust(callSite.paramCells(variable))) + } } + } - buGraph.varToCell.getOrElse(callee, Map.empty).foreach{ - case (variable: Variable, formal) if !ignoreRegisters.contains(variable) => - val test = buGraph.mergeCells(buGraph.adjust(formal), buGraph.adjust(callSite.paramCells(variable))) - test - case _ => + writesTo(callee).foreach { reg => + val returnCells = buGraph.getCells(IRWalk.lastInProc(callee).get, reg) + // assert(returnCells.nonEmpty) + returnCells.foldLeft(buGraph.adjust(callSite.returnCells(reg))) { (c, ret) => + buGraph.mergeCells(c, buGraph.adjust(ret)) } - writesTo(callee).foreach( - reg => - val returnCells = buGraph.getCells(IRWalk.lastInProc(callee).get, reg) - // assert(returnCells.nonEmpty) - val result: DSC = returnCells.foldLeft(buGraph.adjust(callSite.returnCells(reg))){ - // - case (c: DSC, ret) => - val test = buGraph.mergeCells(c, buGraph.adjust(ret)) - test - } - ) - ) - buGraph.collectNodes + } + } + buGraph.collectNodes() // bottom up phase finished // clone bu graphs to top-down graphs - domain.foreach( - proc => - td.update(proc, bu(proc).cloneSelf()) - ) + domain.foreach { proc => + td.update(proc, bu(proc).cloneSelf()) + } queue.enqueue(program.mainProcedure) visited = Set() - // top-down phase - while queue.nonEmpty do + while (queue.nonEmpty) { val proc = queue.dequeue() visited += proc queue.enqueueAll(CallGraph.succ(proc).diff(visited)) val callersGraph = td(proc) - callersGraph.callsites.foreach( - callSite => - val callee = callSite.proc - val calleesGraph = td(callee) - assert(callersGraph.globalMapping.keySet.equals(calleesGraph.globalMapping.keySet)) - - callersGraph.globalMapping.foreach { - case (range: AddressRange, Field(oldNode, offset)) => - val node = callersGraph.find(oldNode).node - node.cloneNode(callersGraph, calleesGraph) - } - - - callSite.paramCells.foreach{ - case (variable: Variable, slice: Slice) => - val node = callersGraph.find(slice).node - node.cloneNode(callersGraph, calleesGraph) - } - - callSite.returnCells.foreach{ - case (variable: Variable, slice: Slice) => - val node = callersGraph.find(slice).node - node.cloneNode(callersGraph, callersGraph) - } - - - callersGraph.globalMapping.foreach { - case (range: AddressRange, Field(oldNode, internal)) => -// val node = callersGraph - val field = callersGraph.find(oldNode) - 
calleesGraph.mergeCells(calleesGraph.globalMapping(range)._1.getCell(calleesGraph.globalMapping(range)._2), - field.node.getCell(field.offset + internal)) - } - - callSite.paramCells.keySet.foreach( - variable => - val paramCells = calleesGraph.getCells(callSite.call, variable) // wrong param offset - paramCells.foldLeft(calleesGraph.adjust(calleesGraph.formals(variable))) { - (cell, slice) => - calleesGraph.mergeCells(calleesGraph.adjust(slice), cell) - } + callersGraph.callsites.foreach { callSite => + val callee = callSite.proc + val calleesGraph = td(callee) + assert(callersGraph.globalMapping.keySet.equals(calleesGraph.globalMapping.keySet)) + + callersGraph.globalMapping.values.foreach { field => + val oldNode = field.node + val node = callersGraph.find(oldNode).node + node.cloneNode(callersGraph, calleesGraph) + } + + callSite.paramCells.values.foreach { slice => + val node = callersGraph.find(slice).node + node.cloneNode(callersGraph, calleesGraph) + } + + callSite.returnCells.values.foreach { slice => + val node = callersGraph.find(slice).node + node.cloneNode(callersGraph, callersGraph) + } + + callersGraph.globalMapping.foreach { case (range: AddressRange, Field(oldNode, internal)) => + // val node = callersGraph + val field = callersGraph.find(oldNode) + calleesGraph.mergeCells( + calleesGraph.globalMapping(range).node.getCell(calleesGraph.globalMapping(range).offset), + field.node.getCell(field.offset + internal) ) + } - calleesGraph.varToCell.getOrElse(callSite.call, Map.empty).foreach{ - case (variable: Variable, oldSlice: Slice) => - val slice = callersGraph.find(oldSlice) - val returnCells = calleesGraph.getCells(IRWalk.lastInProc(callee).get, variable) - returnCells.foldLeft(calleesGraph.adjust(slice)){ - case (c: DSC, retCell: Slice) => - calleesGraph.mergeCells(c, calleesGraph.adjust(retCell)) - } - case _ => ??? 
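// --- Illustrative sketch, naive stand-in; not the project's DSAUnionFindSolver ---
// Both the removed and the reworked code here follow the same pattern: fold a set of
// cells (callee return cells, actual parameters) into one representative with
// mergeCells, which ultimately unifies nodes through a union-find solver. A minimal
// standalone version of that fold-and-union pattern over plain Ints:
object FoldMergeSketch {
  final class UnionFind(n: Int) {
    private val parent = Array.tabulate(n)(identity)
    def find(x: Int): Int =
      if (parent(x) == x) x
      else { parent(x) = find(parent(x)); parent(x) } // path compression
    def union(a: Int, b: Int): Int = { val rb = find(b); parent(find(a)) = rb; rb }
  }

  def main(args: Array[String]): Unit = {
    val uf = new UnionFind(5)
    // e.g. every cell a callee may return gets folded into the caller's return cell 0
    val returnCells = Set(1, 2, 3)
    val rep = returnCells.foldLeft(0)((acc, c) => uf.union(acc, c))
    println((0 to 4).map(uf.find)) // cells 0..3 now share one representative, 4 does not
    println(rep)
  }
}
// --- end sketch ---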
+ callSite.paramCells.keySet.foreach { variable => + val paramCells = calleesGraph.getCells(callSite.call, variable) // wrong param offset + paramCells.foldLeft(calleesGraph.adjust(calleesGraph.formals(variable))) { + (cell, slice) => calleesGraph.mergeCells(calleesGraph.adjust(slice), cell) + } + } + + if (calleesGraph.varToCell.contains(callSite.call)) { + calleesGraph.varToCell(callSite.call).foreach { (variable, oldSlice) => + val slice = callersGraph.find(oldSlice) + val returnCells = calleesGraph.getCells(IRWalk.lastInProc(callee).get, variable) + returnCells.foldLeft(calleesGraph.adjust(slice)) { + (c, retCell) => calleesGraph.mergeCells(c, calleesGraph.adjust(retCell)) + } } - ) - callersGraph.collectNodes + } + } + callersGraph.collectNodes() + } td.toMap } diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/DSAUtility.scala index 54ec1118a..2a7b888dc 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/DSAUtility.scala @@ -2,15 +2,16 @@ package analysis import analysis.solvers.{DSAUniTerm, DSAUnionFindSolver, UnionFindSolver, Var} import cfg_visualiser.{DotStruct, DotStructElement, StructArrow, StructDotGraph} -import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, IntraProcIRCursor, Literal, Memory, MemoryAssign, MemoryLoad, Procedure, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, IRWalk, computeDomain, toShortString} +import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, IRWalk, IntraProcIRCursor, Literal, Memory, MemoryAssign, MemoryLoad, Procedure, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, computeDomain, toShortString} import specification.{ExternalFunction, SpecGlobal, SymbolTableEntry} import util.Logger import scala.util.control.Breaks.{break, breakable} import scala.collection.mutable +import scala.collection.mutable.ArrayBuffer object NodeCounter { - var counter: Int = 0 + private var counter: Int = 0 def getCounter: Int = counter = counter + 1 @@ -42,24 +43,22 @@ class DSG(val proc: Procedure, // DSNodes owned by this graph, only updated once analysis is done, val nodes: mutable.Set[DSN] = mutable.Set() - // Points-to relations in this graph, only updated once the analysis is done, val pointsto: mutable.Map[DSC, Slice] = mutable.Map() // represent callees in proc val callsites: mutable.Set[CallSite] = mutable.Set() - val mallocRegister = Register("R0", 64) - val stackPointer = Register("R31", 64) + private val mallocRegister = Register("R0", 64) // collect all stack access and their maximum accessed size // BigInt is the offset of the stack position and Int is it's size - val stackAccesses: Map[BigInt, Int] = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).foldLeft(Map[BigInt, Int]()) { + private val stackAccesses: Map[BigInt, Int] = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).foldLeft(Map[BigInt, Int]()) { (results, pos) => pos match - case Assign(variable: Variable, expr: Expr, _) => + case Assign(_, expr, _) => expr match - case MemoryLoad(mem, index, endian, size) => + case MemoryLoad(_, index, _, size) => visitStackAccess(pos, index, size).foldLeft(results) { (res, access) => if !res.contains(access.offset) || (res.getOrElse(access.offset, -1) < access.size) then @@ -75,7 +74,7 @@ class DSG(val proc: Procedure, else res } - case MemoryAssign(mem, index: Expr, value: Expr, endian, size: Int, label) => + case 
MemoryAssign(_, index, _, _, size, _) => visitStackAccess(pos, index, size).foldLeft(results) { (res, access) => if !res.contains(access.offset) || (res.getOrElse(access.offset, -1) < access.size) then @@ -86,254 +85,209 @@ class DSG(val proc: Procedure, case _ => results } - case class StackAccess(offset: BigInt, size: Int) + private case class StackAccess(offset: BigInt, size: Int) + private def visitStackAccess(pos: CFGPosition, index: Expr, size: Int): Set[StackAccess] = { - assert( size % 8 == 0) + assert(size % 8 == 0) val byteSize = size / 8 index match - case BinaryExpr(op, arg1: Variable, arg2) if varToSym.contains(pos) && varToSym(pos).contains(arg1) && + case BinaryExpr(_, arg1: Variable, arg2) if varToSym.contains(pos) && varToSym(pos).contains(arg1) && evaluateExpression(arg2, constProp(pos)).isDefined => val offset = evaluateExpression(arg2, constProp(pos)).get.value - varToSym(pos)(arg1).foldLeft(Set[StackAccess]()) { // go through all the symbolic accesses tied to arg1 at pos - (m, sym) => - sym match - case SymbolicAddress(accessor, StackLocation(regionIdentifier, proc, size), symOffset) => // only consider stack accesses - m + StackAccess(offset + symOffset, byteSize) - case _ => m + varToSym(pos)(arg1).flatMap { + case SymbolicAddress(_, _: StackLocation, symOffset) => // only consider stack accesses + Some(StackAccess(offset + symOffset, byteSize)) + case _ => None } case arg: Variable if varToSym.contains(pos) && varToSym(pos).contains(arg) => - varToSym(pos)(arg).foldLeft(Set[StackAccess]()) { - (m, sym) => - sym match - case SymbolicAddress(accessor, StackLocation(regionIdentifier, proc, size), offset) => -// createStackMapping(pos.toShortString, offset, m, byteSize) - m + StackAccess(offset, byteSize) - case _ => m + varToSym(pos)(arg).flatMap { + case SymbolicAddress(_, _: StackLocation, offset) => + // createStackMapping(pos.toShortString, offset, m, byteSize) + Some(StackAccess(offset, byteSize)) + case _ => + None } case _ => Set.empty } - // this is the mapping from offsets/positions on the stack to their representative DS nodes val stackMapping: mutable.Map[BigInt, DSN] = mutable.Map() - var lastOffset: BigInt = -1 - var nextValidOffset: BigInt = 0 - stackAccesses.keys.toSeq.sorted.foreach( - offset => - val byteSize = stackAccesses(offset) - if offset >= nextValidOffset then - val node = DSN(Some(this), byteSize) - node.allocationRegions.add(StackLocation(s"Stack_${proc}_${offset}", proc, byteSize)) - node.flags.stack = true - node.addCell(0, byteSize) - stackMapping.update(offset, node) - lastOffset = offset - else - val diff = nextValidOffset - offset - stackMapping(lastOffset).addCell(diff, byteSize) - nextValidOffset = offset + byteSize - ) + private var lastOffset: BigInt = -1 + private var nextValidOffset: BigInt = 0 + stackAccesses.keys.toSeq.sorted.foreach { offset => + val byteSize = stackAccesses(offset) + if offset >= nextValidOffset then + val node = DSN(Some(this), byteSize) + node.allocationRegions.add(StackLocation(s"Stack_${proc}_$offset", proc, byteSize)) + node.flags.stack = true + node.addCell(0, byteSize) + stackMapping.update(offset, node) + lastOffset = offset + else + val diff = nextValidOffset - offset + stackMapping(lastOffset).addCell(diff, byteSize) + nextValidOffset = offset + byteSize + } private val swappedOffsets = globalOffsets.map(_.swap) // creates the globals from the symbol tables - val globalMapping: mutable.Map[AddressRange, Field] = mutable.Map[AddressRange, Field]() - globals.foreach( - global => - val node = 
DSN(Some(this), global.size) - node.allocationRegions.add(DataLocation(global.name, global.address, global.size/8)) - node.flags.global = true - node.flags.incomplete = true - globalMapping.update(AddressRange(global.address, global.address + global.size/8), Field(node, 0)) - ) + val globalMapping = mutable.Map[AddressRange, Field]() + globals.foreach { global => + val node = DSN(Some(this), global.size) + node.allocationRegions.add(DataLocation(global.name, global.address, global.size / 8)) + node.flags.global = true + node.flags.incomplete = true + globalMapping.update(AddressRange(global.address, global.address + global.size / 8), Field(node, 0)) + } // creates a global for each relocation entry in the symbol table // the global corresponding to the relocated address points to the global corresponding to the original address - globals.foreach( - global => - var address = global.address - breakable { - while swappedOffsets.contains(address) do - val relocatedAddress = swappedOffsets(address) - if relocatedAddress == address then - break - - var field: BigInt = 0 - val node: DSN = isGlobal(relocatedAddress) match - case Some(value) => - field = relocatedAddress - value._1._1 - val node = value._2._1 - node.addCell(field, 8) - node - - case None => - val node = DSN(Some(this)) - node.allocationRegions.add(DataLocation(s"Relocated_$relocatedAddress", relocatedAddress, 8)) - node.flags.global = true - node.flags.incomplete = true - globalMapping.update(AddressRange(relocatedAddress, relocatedAddress + 8), Field(node, 0)) - node + globals.foreach { global => + var address = global.address + breakable { + while (swappedOffsets.contains(address)) { + val relocatedAddress = swappedOffsets(address) + if relocatedAddress == address then + break - node.cells(field).pointee = Some(Slice(isGlobal(address).get._2._1.cells(0), 0)) - address = relocatedAddress - } - ) + var field: BigInt = 0 + val node: DSN = isGlobal(relocatedAddress) match + case Some(value) => + field = relocatedAddress - value.addressRange.start + val node = value.field.node + node.addCell(field, 8) + node - externalFunctions.foreach( - external => - val node = DSN(Some(this)) - node.allocationRegions.add(DataLocation(external.name, external.offset, 0)) - node.flags.global = true - node.flags.incomplete = true - globalMapping.update(AddressRange(external.offset, external.offset), Field(node, 0)) - ) + case None => + val node = DSN(Some(this)) + node.allocationRegions.add(DataLocation(s"Relocated_$relocatedAddress", relocatedAddress, 8)) + node.flags.global = true + node.flags.incomplete = true + globalMapping.update(AddressRange(relocatedAddress, relocatedAddress + 8), Field(node, 0)) + node + node.cells(field).pointee = Some(Slice(isGlobal(address).get.field.node.cells(0), 0)) + address = relocatedAddress + } + } + } + + externalFunctions.foreach { external => + val node = DSN(Some(this)) + node.allocationRegions.add(DataLocation(external.name, external.offset, 0)) + node.flags.global = true + node.flags.incomplete = true + globalMapping.update(AddressRange(external.offset, external.offset), Field(node, 0)) + } // determine if an address is a global and return the corresponding global if it is. 
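// --- Illustrative sketch, simplified stand-in; not the DSG globalMapping API ---
// isGlobal below is a linear scan of globalMapping for the address range that contains
// a given address; a zero-length range (as produced for external functions above) only
// matches its exact start address. The same containment test in isolation:
object GlobalLookupSketch {
  final case class AddrRange(start: BigInt, end: BigInt)

  def findRange(ranges: Seq[AddrRange], address: BigInt): Option[AddrRange] =
    ranges.find(r => address >= r.start && (address < r.end || (r.start == r.end && r.end == address)))

  def main(args: Array[String]): Unit = {
    val ranges = Seq(AddrRange(BigInt(0x11020), BigInt(0x11028)), AddrRange(BigInt(0x11028), BigInt(0x11028)))
    println(findRange(ranges, BigInt(0x11024))) // Some(AddrRange(69664,69672)), inside the first range
    println(findRange(ranges, BigInt(0x11028))) // Some(AddrRange(69672,69672)), exact hit on the empty range
    println(findRange(ranges, BigInt(0x11030))) // None
  }
}
// --- end sketch ---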
- def isGlobal(address: BigInt): Option[DSAGlobal] = - var global: Option[DSAGlobal] = None + def isGlobal(address: BigInt): Option[DSAGlobal] = { + var global: Option[DSAGlobal] = None breakable { - for (elem <- globalMapping) { - val range = elem._1 - val field = elem._2 + for ((range, field) <- globalMapping) { if address >= range.start && (address < range.end || (range.start == range.end && range.end == address)) then global = Some(DSAGlobal(range, field)) break } } global + } - def getCells(pos: CFGPosition, arg: Variable): Set[Slice] = - if reachingDefs(pos).contains(arg) then - reachingDefs(pos)(arg).foldLeft(Set[Slice]()) { - (s, defintion) => - s + varToCell(defintion)(arg) - } - else + def getCells(pos: CFGPosition, arg: Variable): Set[Slice] = { + if (reachingDefs(pos).contains(arg)) { + reachingDefs(pos)(arg).map(definition => varToCell(definition)(arg)) + } else { Set(formals(arg)) + } + } /** * collects all the nodes that are currently in the DSG and updates nodes member variable */ - def collectNodes = + def collectNodes(): Unit = { nodes.clear() pointsto.clear() - nodes.addAll(formals.values.map(_._1.node.get).map(n => find(n).node)) - varToCell.values.foreach( - value => nodes.addAll(value.values.map(_._1.node.get).map(n => find(n).node)) - ) + nodes.addAll(formals.values.map(n => find(n.cell.node.get).node)) + varToCell.values.foreach { + value => nodes.addAll(value.values.map(n => find(n.cell.node.get).node)) + } nodes.addAll(stackMapping.values.map(n => find(n).node)) - nodes.addAll(globalMapping.values.map(_._1).map(n => find(n).node)) + nodes.addAll(globalMapping.values.map(n => find(n.node).node)) val queue: mutable.Queue[DSN] = mutable.Queue() queue.enqueueAll(nodes) - while queue.nonEmpty do + while (queue.nonEmpty) { val cur = queue.dequeue() - cur.cells.foreach { - case (offset: BigInt, cell: DSC) if cell.pointee.isDefined => + cur.cells.values.foreach { cell => + if (cell.pointee.isDefined) { val node = find(cell.getPointee.node).node if !nodes.contains(node) then nodes.add(node) queue.enqueue(node) assert(!pointsto.contains(cell)) pointsto.update(cell, find(cell.getPointee)) - case _ => + } } + } + } def toDot: String = { - collectNodes + collectNodes() + val structs = ArrayBuffer[DotStruct]() + val arrows = ArrayBuffer[StructArrow]() - var structs = nodes.foldLeft(Set[DotStruct]()) { - (s, n) => - s + DotStruct(n.id.toString, n.toString, Some(n.cells.keys.map(o => o.toString))) + nodes.foreach { n => + structs.append(DotStruct(n.id.toString, n.toString, Some(n.cells.keys.map(o => o.toString)))) } - structs ++= formals.foldLeft(Set[DotStruct]()) { - (s, n) => - val variable = n._1.name - s + DotStruct(s"Formal_$variable", s"Formal_$variable", None) + formals.keys.foreach { variable => + structs.append(DotStruct(s"Formal_$variable", s"Formal_$variable", None)) } - structs ++= varToCell.foldLeft(Set[DotStruct]()) { - (s, r) => - val pos = r._1 - val mapping = r._2 - s ++ mapping.foldLeft(Set[DotStruct]()) { - (k, n) => - val variable = n._1.name - k + DotStruct(s"SSA_${ - if pos.toShortString.startsWith("%") then pos.toShortString.drop(1) else pos.toShortString - }_$variable", s"SSA_${pos}_$variable", None, false) - } + pointsto.foreach { (cell, pointee) => + val pointerID = cell.node.get.id.toString + val pointerOffset = cell.offset.toString + arrows.append(StructArrow(DotStructElement(pointerID, Some(pointerOffset)), DotStructElement(pointee.node.id.toString, Some(pointee.cell.offset.toString)), pointee.internalOffset.toString)) } - - structs ++= 
globalMapping.foldLeft(Set[DotStruct]()) { - (s, n) => - val range = n._1 - s + DotStruct(s"Global_${range.start}_${range.end}", s"Global_$range", None) + formals.foreach { (variable, slice) => + val value = find(slice) + arrows.append(StructArrow(DotStructElement(s"Formal_${variable.name}", None), DotStructElement(value.node.id.toString, Some(value.cell.offset.toString)), value.internalOffset.toString)) } - structs ++= stackMapping.foldLeft(Set[DotStruct]()) { - (s, n) => - val offset = n._1 - s + DotStruct(s"Stack_$offset", s"Stack_$offset", None) - } - - var arrows = - pointsto.foldLeft(Set[StructArrow]()) { - case (s: Set[StructArrow], (cell: DSC, pointee: Slice)) => - val pointerID = cell.node.get.id.toString - val pointerOffset = cell.offset.toString - - s + StructArrow(DotStructElement(pointerID, Some(pointerOffset)), DotStructElement(pointee.node.id.toString, Some(pointee.cell.offset.toString)), pointee.internalOffset.toString) + varToCell.foreach { (pos, mapping) => + var id = pos.toShortString + if (id.startsWith("%")) { + id = id.drop(1) + } + mapping.foreach { (variable, slice) => + structs.append(DotStruct(s"SSA_${id}_${variable.name}", s"SSA_${pos}_${variable.name}", None, false)) + val value = find(slice) + arrows.append(StructArrow(DotStructElement(s"SSA_${id}_${variable.name}", None), DotStructElement(value.node.id.toString, Some(value.cell.offset.toString)), value.internalOffset.toString)) } - - arrows ++= formals.foldLeft(Set[StructArrow]()) { - (s, n) => - val variable = n._1.name - val value = find(n._2) - s + StructArrow(DotStructElement(s"Formal_$variable", None), DotStructElement(value.node.id.toString, Some(value.cell.offset.toString)), value.internalOffset.toString) - } - - arrows ++= varToCell.foldLeft(Set[StructArrow]()) { - (s, r) => - val pos = r._1 - val mapping = r._2 - s ++ mapping.foldLeft(Set[StructArrow]()) { - (k, n) => - val variable = n._1.name - val value = find(n._2) - k + StructArrow(DotStructElement(s"SSA_${ - if pos.toShortString.startsWith("%") then pos.toShortString.drop(1) else pos.toShortString - }_$variable", None), DotStructElement(value.node.id.toString, Some(value.cell.offset.toString)), value.internalOffset.toString) - } } - - arrows ++= globalMapping.foldLeft(Set[StructArrow]()) { - (s, n) => - val range = n._1 - val node= find(n._2.node).node - val offset = n._2.offset + find(n._2.node).offset - val cellOffset = node.getCell(offset).offset - val internalOffset = offset - cellOffset - s + StructArrow(DotStructElement(s"Global_${range.start}_${range.end}", None), DotStructElement(node.id.toString, Some(cellOffset.toString)), internalOffset.toString) + globalMapping.foreach { (range, field) => + structs.append(DotStruct(s"Global_${range.start}_${range.end}", s"Global_$range", None)) + val node = find(field.node).node + val offset = field.offset + find(field.node).offset + val cellOffset = node.getCell(offset).offset + val internalOffset = offset - cellOffset + arrows.append(StructArrow(DotStructElement(s"Global_${range.start}_${range.end}", None), DotStructElement(node.id.toString, Some(cellOffset.toString)), internalOffset.toString)) } - arrows ++= stackMapping.foldLeft(Set[StructArrow]()) { - (s, n) => - val offset = n._1 - val node = find(n._2).node - val nodeOffset = find(n._2).offset - val cellOffset = node.getCell(nodeOffset).offset - val internalOffset = nodeOffset - cellOffset - s + StructArrow(DotStructElement(s"Stack_$offset", None), DotStructElement(node.id.toString, Some(cellOffset.toString)), internalOffset.toString) + 
stackMapping.foreach { (offset, dsn) => + structs.append(DotStruct(s"Stack_$offset", s"Stack_$offset", None)) + val node = find(dsn).node + val nodeOffset = find(dsn).offset + val cellOffset = node.getCell(nodeOffset).offset + val internalOffset = nodeOffset - cellOffset + arrows.append(StructArrow(DotStructElement(s"Stack_$offset", None), DotStructElement(node.id.toString, Some(cellOffset.toString)), internalOffset.toString)) } - StructDotGraph(proc.name, structs, arrows).toDotString } @@ -341,59 +295,58 @@ class DSG(val proc: Procedure, /** * Collapses the node causing it to lose field sensitivity */ - def collapseNode(n: DSN): DSN = - - - val (term, offset) = solver.findWithOffset(n.term) + def collapseNode(n: DSN): DSN = { + val (term, _) = solver.findWithOffset(n.term) val node: DSN = term.node - if !(n.collapsed || find(n).node.collapsed) then - + if (!(n.collapsed || find(n).node.collapsed)) { val collapsedNode: DSN = DSN(n.graph) - val collapedCell = DSC(Some(collapsedNode), 0) + val collapsedCell = DSC(Some(collapsedNode), 0) n.flags.collapsed = true collapsedNode.flags.collapsed = true var pointeeInternalOffset: BigInt = 0 var pointToItself = false - var cell = node.cells.tail.foldLeft(adjust(node.cells.head._2.getPointee)) { - (c, field) => - val cell = field._2 - val pointee = cell.pointee - if pointee.isDefined && adjust(cell.getPointee) == cell then - pointToItself = true - c - else if pointee.isDefined then - val slice = cell.getPointee - if slice.internalOffset > pointeeInternalOffset then - pointeeInternalOffset = slice.internalOffset - mergeCells(c, adjust(slice)) - else - c + val cells = node.cells.values + var cell = cells.tail.foldLeft(adjust(cells.head.getPointee)) { (c, cell) => + val pointee = cell.pointee + if (pointee.isDefined && adjust(cell.getPointee) == cell) { + pointToItself = true + c + } else if (pointee.isDefined) { + val slice = cell.getPointee + if (slice.internalOffset > pointeeInternalOffset) { + pointeeInternalOffset = slice.internalOffset + } + mergeCells(c, adjust(slice)) + } else { + c + } } - if pointToItself then - cell = mergeCells(cell, collapedCell) - + if (pointToItself) { + cell = mergeCells(cell, collapsedCell) + } - collapedCell.pointee = Some(Slice(collapedCell, 0)) + collapsedCell.pointee = Some(Slice(collapsedCell, 0)) assert(collapsedNode.cells.size == 1) - collapsedNode.children.addAll(node.children) collapsedNode.children += (node -> 0) collapsedNode.allocationRegions.addAll(node.allocationRegions) // add regions and flags of node 1 to node 2 collapsedNode.flags.join(node.flags) - solver.unify(n.term, collapsedNode.term, 0) collapsedNode - else + } else { assert(find(n).node.collapsed) find(n).node + } + } + /** * this function merges all the overlapping cells in the given node @@ -403,15 +356,17 @@ class DSG(val proc: Procedure, var lastOffset: BigInt = -1 var lastAccess: BigInt = -1 val removed = mutable.Set[BigInt]() - node.cells.toSeq.sortBy(_._1).foreach { - case (offset: BigInt, cell: DSC) => - if lastOffset + lastAccess > offset then - val result = mergeNeighbours(node.cells(lastOffset), cell) - removed.add(offset) - lastAccess = result.largestAccessedSize - else - lastOffset = offset - lastAccess = cell.largestAccessedSize + val sortedOffsets = node.cells.keys.toSeq.sorted + sortedOffsets.foreach { offset => + val cell = node.cells(offset) + if (lastOffset + lastAccess > offset) { + val result = mergeNeighbours(node.cells(lastOffset), cell) + removed.add(offset) + lastAccess = result.largestAccessedSize + } else { + 
lastOffset = offset + lastAccess = cell.largestAccessedSize + } } removed.foreach(node.cells.remove) } @@ -419,21 +374,23 @@ class DSG(val proc: Procedure, /** * merges two neighbouring cells into one */ - def mergeNeighbours(cell1: DSC, cell2: DSC): DSC = + private def mergeNeighbours(cell1: DSC, cell2: DSC): DSC = { require(cell1.node.equals(cell2.node) && cell1.offset < cell2.offset) - if cell2.pointee.isDefined then - if cell1.pointee.isDefined then + if (cell2.pointee.isDefined) { + if (cell1.pointee.isDefined) { val slice1 = cell1.getPointee val slice2 = cell2.getPointee val result = mergeCells(adjust(slice1), adjust(slice2)) cell1.pointee = Some(Slice(result, slice2.internalOffset.max(slice1.internalOffset))) - else + } else { cell1.pointee = cell2.pointee + } + } val internalOffsetChange = cell2.offset - cell1.offset cell2.node.get.cells.remove(cell2.offset) cell1.growSize((cell2.offset - cell1.offset).toInt + cell2.largestAccessedSize) // might cause another collapse cell1 - + } // private val parent = mutable.Map[DSC, DSC]() val solver: DSAUnionFindSolver = DSAUnionFindSolver() @@ -443,10 +400,11 @@ class DSG(val proc: Procedure, * @param node the node to perform find on * @return a field which is the tuple (parent node of the input node, starting offset of the input node in its parent) */ - def find(node: DSN) : Field = + def find(node: DSN): Field = { val (n, offset) = solver.findWithOffset(node.term) val resultNode = n.node Field(resultNode, offset) + } /** * wrapper for find functionality of the union-find @@ -454,14 +412,13 @@ class DSG(val proc: Procedure, * @param cell the cell to perform find on * @return the input cell's equivalent cell in the parent */ - def find(cell: DSC) : DSC = + def find(cell: DSC): DSC = { val node = cell.node.get - val offset = cell.offset val parent: Field = find(node) parent.node.addCell(cell.offset + parent.offset, cell.largestAccessedSize) + } - def find(slice: Slice) : Slice = - deadjust(adjust(slice)) + def find(slice: Slice): Slice = deadjust(adjust(slice)) /** * merges two cells and unifies their nodes @@ -469,8 +426,7 @@ class DSG(val proc: Procedure, * @param cell2 * @return the resulting cell in the unified node */ - def mergeCells(c1: DSC, c2: DSC): DSC = - + def mergeCells(c1: DSC, c2: DSC): DSC = { var cell1 = c1 var cell2 = c2 if c1.node.isDefined then @@ -506,10 +462,10 @@ class DSG(val proc: Procedure, val slice2 = node2.cells(0).getPointee val result = mergeCells(adjust(slice1), adjust(slice2)) node2.cells(0).pointee = Some(Slice(result, slice1.internalOffset.max(slice2.internalOffset))) - else + else node2.cells(0).pointee = node1.cells(0).pointee -// node1.cells(0).pointee = None -// replace(node1.cells(0), node2.cells(0), 0) + // node1.cells(0).pointee = None + // replace(node1.cells(0), node2.cells(0), 0) solver.unify(node1.term, node2.term, 0) node2.cells(0) else // standard merge @@ -526,12 +482,10 @@ class DSG(val proc: Procedure, // create a seq of all cells from both nodes in order of their offsets in the resulting unified node - val cells : Seq[(BigInt, DSC)] = (node1.cells.toSeq ++ node2.cells.foldLeft(Seq[(BigInt, DSC)]()){ - (s, tuple) => - val offset = tuple._1 - val cell = tuple._2 - s:+ ((offset + delta, cell)) // cells from nodes two are adjusted by the difference between cell1 and cell2 offsets - }).sortBy(_._1) + + val node2CellsOffset = node2.cells.toSeq.map((offset, cell) => (offset + delta, cell)) + + val cells: Seq[(BigInt, DSC)] = (node1.cells.toSeq ++ node2CellsOffset).sortBy(_(0)) var lastOffset: BigInt 
= -1 var lastAccess: Int = -1 @@ -543,11 +497,13 @@ class DSG(val proc: Procedure, resultNode.flags.join(node2.flags) resultNode.children.addAll(node1.children) resultNode.children += (node1 -> 0) - resultNode.children.addAll(node2.children.map(f => (f._1, f._2 + delta))) + node2.children.keys.foreach { k => + resultNode.children(k) = node2.children(k) + delta + } resultNode.children += (node2 -> delta) if node2.flags.global then // node 2 may have been adjusted depending on cell1 and cell2 offsets - globalMapping.foreach{ // update global mapping if node 2 was global - case (range: AddressRange, Field(node, offset))=> + globalMapping.foreach { // update global mapping if node 2 was global + case (range: AddressRange, Field(node, offset)) => if node.equals(node2) then globalMapping.update(range, Field(node, offset + delta)) } @@ -555,41 +511,46 @@ class DSG(val proc: Procedure, // compute the cells present in the resulting unified node // a mapping from offsets to the set of old cells which are merged to form a cell in the new unified node // values in the mapping also include the largest access size so far computed for each resulting cell - val resultCells: mutable.Map[BigInt, (Set[DSC], Int)] = mutable.Map() - cells.foreach { - case (offset: BigInt, cell: DSC) => - if (lastOffset + lastAccess > offset) || lastOffset == offset then // includes this cell - if (offset - lastOffset) + cell.largestAccessedSize > lastAccess then - lastAccess = (offset - lastOffset).toInt + cell.largestAccessedSize - resultCells.update(lastOffset, (resultCells(lastOffset)._1 + cell, lastAccess)) - else - lastOffset = offset - lastAccess = cell.largestAccessedSize - resultCells.update(lastOffset, (Set(cell), lastAccess)) + val resultCells = mutable.Map[BigInt, mutable.Set[DSC]]() + val resultLargestAccesses = mutable.Map[BigInt, Int]() + cells.foreach { (offset, cell) => + if ((lastOffset + lastAccess > offset) || lastOffset == offset) { // includes this cell + if ((offset - lastOffset) + cell.largestAccessedSize > lastAccess) { + lastAccess = (offset - lastOffset).toInt + cell.largestAccessedSize + } + if (resultCells.contains(lastOffset)) { + resultCells(lastOffset).addOne(cell) + } else { + resultCells(lastOffset) = mutable.Set(cell) + } + resultLargestAccesses(lastOffset) = lastAccess + } else { + lastOffset = offset + lastAccess = cell.largestAccessedSize + resultCells(lastOffset) = mutable.Set(cell) + resultLargestAccesses(lastOffset) = lastAccess + } } - resultCells.foreach { - case (offset: BigInt, (cells: Set[DSC], largestAccess: Int)) => - val collapsedCell = resultNode.addCell(offset, largestAccess) - val outgoing: Set[Slice] = cells.foldLeft(Set[Slice]()){ - (set, cell) => - - // collect outgoing edges - if cell.pointee.isDefined then - val pointee = cell.getPointee - set + pointee - else - set + resultCells.keys.foreach { offset => + val collapsedCell = resultNode.addCell(offset, resultLargestAccesses(offset)) + val outgoing: Set[Slice] = cells.flatMap { (_, cell) => + if (cell.pointee.isDefined) { + Some(cell.getPointee) + } else { + None } - // replace outgoing edges - if outgoing.size == 1 then - collapsedCell.pointee = Some(outgoing.head) - else if outgoing.size > 1 then - val result = outgoing.tail.foldLeft(adjust(outgoing.head)){ - (result, pointee) => - mergeCells(result, adjust(pointee)) - } - collapsedCell.pointee = Some(deadjust(result)) + }.toSet + // replace outgoing edges + if (outgoing.size == 1) { + collapsedCell.pointee = Some(outgoing.head) + } else if (outgoing.size > 1) { + val 
result = outgoing.tail.foldLeft(adjust(outgoing.head)) { + (result, pointee) => + mergeCells(result, adjust(pointee)) + } + collapsedCell.pointee = Some(deadjust(result)) + } } solver.unify(node1.term, resultNode.term, 0) @@ -598,173 +559,172 @@ class DSG(val proc: Procedure, resultNode.getCell(cell1.offset) else resultNode.getCell(cell2.offset) + } - - def adjust(cell: DSC, internalOffset: BigInt): DSC = - val link = solver.findWithOffset(cell.node.get.term) - val node = link._1.node - val linkOffset = link._2 + def adjust(cell: DSC, internalOffset: BigInt): DSC = { + val (term, linkOffset) = solver.findWithOffset(cell.node.get.term) + val node = term.node node.addCell(cell.offset + internalOffset + linkOffset, 0) + } - def adjust(slice: Slice, offset: BigInt = 0): DSC = + def adjust(slice: Slice, offset: BigInt = 0): DSC = { val cell = slice.cell val internal = slice.internalOffset adjust(cell, internal + offset) + } - def deadjust(cell: DSC) : Slice = + def deadjust(cell: DSC): Slice = { val node = cell.node.get val offset = cell.offset selfCollapse(node) val newCell = node.getCell(offset) assert(offset >= newCell.offset) Slice(newCell, offset - newCell.offset) + } - - private def isFormal(pos: CFGPosition, variable: Variable): Boolean = - !reachingDefs(pos).contains(variable) + private def isFormal(pos: CFGPosition, variable: Variable): Boolean = !reachingDefs(pos).contains(variable) // formal arguments to this function val formals: mutable.Map[Variable, Slice] = mutable.Map() // mapping from each SSA variable (position, variable) to a slice - val varToCell: mutable.Map[CFGPosition, mutable.Map[Variable, Slice]] = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).foldLeft(mutable.Map[CFGPosition, mutable.Map[Variable, Slice]]()) { - (m, pos) => - pos match - case Assign(variable, value, _) => - value.variables.foreach( - v => - if isFormal(pos, v) then - val node = DSN(Some(this)) - node.flags.incomplete = true - nodes.add(node) - formals.update(v, Slice(node.cells(0), 0)) - ) - val node = DSN(Some(this)) - m +=(pos -> mutable.Map(variable -> Slice(node.cells(0), 0))) - case DirectCall(target, _) if target.name == "malloc" => - val node = DSN(Some(this)) - m += (pos -> mutable.Map(mallocRegister -> Slice(node.cells(0), 0))) - case DirectCall(target, _) if writesTo.contains(target) => - val result: Map[Variable, Slice] = writesTo(target).foldLeft(Map[Variable, Slice]()){ - (n, variable) => - val node = DSN(Some(this)) - n + (variable -> Slice(node.cells(0), 0)) - } - m += (pos -> result.to(mutable.Map)) - case MemoryAssign(memory, index: Expr, expr: Expr, endian, size: Int, label) if unwrapPaddingAndSlicing(expr).isInstanceOf[Variable] => - val value: Variable = unwrapPaddingAndSlicing(expr).asInstanceOf[Variable] - if isFormal(pos, value) then + val varToCell: mutable.Map[CFGPosition, mutable.Map[Variable, Slice]] = varToCellInit(proc) + + private def varToCellInit(proc: Procedure): mutable.Map[CFGPosition, mutable.Map[Variable, Slice]] = { + val varToCell = mutable.Map[CFGPosition, mutable.Map[Variable, Slice]]() + val domain = computeDomain(IntraProcIRCursor, Set(proc)) + domain.foreach { + case pos @ Assign(variable, value, _) => + value.variables.foreach { v => + if (isFormal(pos, v)) { val node = DSN(Some(this)) node.flags.incomplete = true nodes.add(node) - formals.update(value, Slice(node.cells(0), 0)) - m - case _ => m + formals.update(v, Slice(node.cells(0), 0)) + } + } + val node = DSN(Some(this)) + varToCell(pos) = mutable.Map(variable -> 
Slice(node.cells(0), 0)) + case pos @ DirectCall(target, _) if target.name == "malloc" => + val node = DSN(Some(this)) + varToCell(pos) = mutable.Map(mallocRegister -> Slice(node.cells(0), 0)) + case pos @ DirectCall(target, _) if writesTo.contains(target) => + val result = mutable.Map[Variable, Slice]() + writesTo(target).foreach { variable => + val node = DSN(Some(this)) + result(variable) = Slice(node.cells(0), 0) + } + varToCell(pos) = result + case pos @ MemoryAssign(_, _, expr, _, _, _) => + unwrapPaddingAndSlicing(expr) match { + case value: Variable => + if (isFormal(pos, value)) { + val node = DSN(Some(this)) + node.flags.incomplete = true + nodes.add(node) + formals.update(value, Slice(node.cells(0), 0)) + } + case _ => + } + case _ => + } + varToCell } - - def cloneSelf(): DSG = + def cloneSelf(): DSG = { val newGraph = DSG(proc, constProp, varToSym, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params) assert(formals.size == newGraph.formals.size) val nodes = mutable.Set[DSN]() val idToNode: mutable.Map[Int, DSN] = mutable.Map() - formals.foreach{ - case (variable: Variable, slice: Slice) => -// assert(newGraph.formals.contains(variable)) - val node = find(slice).node + formals.foreach { (variable, slice) => + // assert(newGraph.formals.contains(variable)) + val node = find(slice).node + nodes.add(node) + if !idToNode.contains(node.id) then + val newNode = node.cloneSelf(newGraph) + idToNode.update(node.id, newNode) + newGraph.formals.update(variable, Slice(idToNode(node.id).cells(slice.offset), slice.internalOffset)) + } + + varToCell.foreach { (position, values) => + // assert(newGraph.varToCell.contains(position)) + if (!newGraph.varToCell.contains(position)) { + newGraph.varToCell.update(position, mutable.Map[Variable, Slice]()) + } + values.foreach { (variable, s) => + // assert(newGraph.varToCell(position).contains(variable)) + val slice = find(s) + val node = slice.node nodes.add(node) - if !idToNode.contains(node.id) then + if (!idToNode.contains(node.id)) { val newNode = node.cloneSelf(newGraph) idToNode.update(node.id, newNode) - newGraph.formals.update(variable, Slice(idToNode(node.id).cells(slice.offset), slice.internalOffset)) - } - - varToCell.foreach { - case (position: CFGPosition, values: mutable.Map[Variable, Slice]) => -// assert(newGraph.varToCell.contains(position)) - if !newGraph.varToCell.contains(position) then - newGraph.varToCell.update(position, mutable.Map[Variable, Slice]()) - values.foreach{ - case (variable: Variable, s: Slice) => -// assert(newGraph.varToCell(position).contains(variable)) - val slice = find(s) - val node = slice.node - nodes.add(node) - if !idToNode.contains(node.id) then - val newNode = node.cloneSelf(newGraph) - idToNode.update(node.id, newNode) - newGraph.varToCell(position).update(variable, Slice(idToNode(node.id).cells(slice.offset), slice.internalOffset)) } + newGraph.varToCell(position).update(variable, Slice(idToNode(node.id).cells(slice.offset), slice.internalOffset)) + } } - stackMapping.foreach{ - case (offset, oldNode) => - val node = find(oldNode).node - nodes.add(node) - assert(newGraph.stackMapping.contains(offset)) - if !idToNode.contains(node.id) then - val newNode = node.cloneSelf(newGraph) - idToNode.update(node.id, newNode) - newGraph.stackMapping.update(offset, idToNode(node.id)) + stackMapping.foreach { (offset, oldNode) => + val node = find(oldNode).node + nodes.add(node) + assert(newGraph.stackMapping.contains(offset)) + if !idToNode.contains(node.id) then + val newNode = 
node.cloneSelf(newGraph) + idToNode.update(node.id, newNode) + newGraph.stackMapping.update(offset, idToNode(node.id)) } - globalMapping.foreach { - case (range: AddressRange, Field(node, offset)) => - assert(newGraph.globalMapping.contains(range)) - val field = find(node) - nodes.add(field.node) - if !idToNode.contains(field.node.id) then - val newNode = node.cloneSelf(newGraph) - idToNode.update(field.node.id, newNode) - newGraph.globalMapping.update(range, Field(idToNode(field.node.id), field.offset + offset)) + globalMapping.foreach { case (range: AddressRange, Field(node, offset)) => + assert(newGraph.globalMapping.contains(range)) + val field = find(node) + nodes.add(field.node) + if !idToNode.contains(field.node.id) then + val newNode = node.cloneSelf(newGraph) + idToNode.update(field.node.id, newNode) + newGraph.globalMapping.update(range, Field(idToNode(field.node.id), field.offset + offset)) } - val queue: mutable.Queue[DSN] = mutable.Queue() + val queue = mutable.Queue[DSN]() queue.addAll(nodes) - while queue.nonEmpty do - + while (queue.nonEmpty) { val node = queue.dequeue() - node.cells.foreach { - case (offset: BigInt, cell: DSC) if cell.pointee.isDefined => + node.cells.values.foreach { cell => + if (cell.pointee.isDefined) { val id = cell.node.get.id val pointee = find(cell.getPointee) val pointeeId = pointee.node.id - if !idToNode.contains(pointeeId) then + if (!idToNode.contains(pointeeId)) { queue.enqueue(pointee.node) val newNode = pointee.node.cloneSelf(newGraph) idToNode.update(pointeeId, newNode) + } idToNode(id).cells(cell.offset).pointee = Some(Slice(idToNode(pointeeId).cells(pointee.offset), pointee.internalOffset)) - - - case _ => - } - - callsites.foreach( - callSite => - val cs = CallSite(callSite.call, newGraph) - newGraph.callsites.add(cs) - assert(cs.paramCells.keySet.equals(callSite.paramCells.keySet)) - callSite.paramCells.foreach{ - case (variable: Variable, oldSlice : Slice) => - val slice = find(oldSlice) - assert(cs.paramCells.contains(variable)) - val id = slice.node.id - cs.paramCells.update(variable, Slice(idToNode(id).cells(slice.offset), slice.internalOffset)) } + } + } - callSite.returnCells.foreach{ - case (variable: Variable, oldSlice: Slice) => - val slice = find(oldSlice) - assert(cs.returnCells.contains(variable)) - val id = slice.node.id - cs.returnCells.update(variable, Slice(idToNode(id).cells(slice.offset), slice.internalOffset)) - } - ) + callsites.foreach { callSite => + val cs = CallSite(callSite.call, newGraph) + newGraph.callsites.add(cs) + assert(cs.paramCells.keySet.equals(callSite.paramCells.keySet)) + callSite.paramCells.foreach { (variable, oldSlice) => + val slice = find(oldSlice) + assert(cs.paramCells.contains(variable)) + val id = slice.node.id + cs.paramCells.update(variable, Slice(idToNode(id).cells(slice.offset), slice.internalOffset)) + } + callSite.returnCells.foreach { (variable, oldSlice) => + val slice = find(oldSlice) + assert(cs.returnCells.contains(variable)) + val id = slice.node.id + cs.returnCells.update(variable, Slice(idToNode(id).cells(slice.offset), slice.internalOffset)) + } + } newGraph.nodes.addAll(idToNode.values) newGraph - + } } class Flags() { @@ -783,7 +743,7 @@ class Flags() { stack = other.stack || stack heap = other.heap || heap global = other.global || global - unknown =other.unknown || unknown + unknown = other.unknown || unknown read = other.read || read modified = other.modified || modified incomplete = other.incomplete || incomplete @@ -793,44 +753,47 @@ class Flags() { /** * a Data 
structure Node */ -class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCounter.getCounter) { +class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCounter.getCounter) { - val term = DSAUniTerm(this) - val children : mutable.Map[DSN, BigInt] = mutable.Map() + val term: DSAUniTerm = DSAUniTerm(this) + val children: mutable.Map[DSN, BigInt] = mutable.Map() // var collapsed = false - var flags = Flags() - def collapsed = flags.collapsed + var flags: Flags = Flags() + def collapsed: Boolean = flags.collapsed val allocationRegions: mutable.Set[MemoryLocation] = mutable.Set() val cells: mutable.Map[BigInt, DSC] = mutable.Map() this.addCell(0, 0) - def updateSize(newSize: BigInt): Unit = - + private def updateSize(newSize: BigInt): Unit = { if newSize > size then size = newSize + } - def getCell(offset: BigInt): DSC = - if collapsed then + def getCell(offset: BigInt): DSC = { + if (collapsed) { cells(0) - else if !cells.contains(offset) then + } else if (!cells.contains(offset)) { var result: Option[DSC] = None - cells.foreach { - case (start: BigInt, cell: DSC) => - if start <= offset && offset < (start + cell.largestAccessedSize) then - result = Some(cell) + cells.foreach { (start, cell) => + if (start <= offset && offset < (start + cell.largestAccessedSize)) { + result = Some(cell) + } } - result match + result match { case Some(value) => value case None => ??? -// Logger.warn(s"$this didn't have a cell at offset: $offset. An empty cell was added in") -// addCell(offset, 0) - else + // Logger.warn(s"$this didn't have a cell at offset: $offset. An empty cell was added in") + // addCell(offset, 0) + } + } else { cells(offset) - - - def addCell(offset: BigInt, size: Int) : DSC = + } + } + + + def addCell(offset: BigInt, size: Int): DSC = { this.updateSize(offset + size) if collapsed then cells(0) @@ -842,67 +805,64 @@ class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCount else cells(offset).growSize(size) cells(offset) + } - def cloneSelf(graph: DSG) : DSN = + def cloneSelf(graph: DSG): DSN = { val node = DSN(Some(graph), this.size) node.allocationRegions.addAll(this.allocationRegions) node.flags.join(this.flags) - cells.foreach{ - case (offset: BigInt, cell: DSC) => - node.addCell(offset, cell.largestAccessedSize) + cells.foreach { (offset, cell) => + node.addCell(offset, cell.largestAccessedSize) } node + } - def cloneNode(from: DSG, to: DSG): Unit = + def cloneNode(from: DSG, to: DSG): Unit = { // assert(from.nodes.contains(this)) TODO update nodes after each phase for to check this assertion - if !to.nodes.contains(this) then + if (!to.nodes.contains(this)) { to.nodes.add(this) - - from.varToCell.foreach( - t => - val pos = t._1 - val varMap = t._2 - varMap.foreach{ - case (variable: Variable, slice: Slice) => - if from.find(slice).node.equals(this) then - to.varToCell.update( - pos, - - to.varToCell.getOrElseUpdate(pos, - mutable.Map[Variable, Slice]()) ++ Map(variable -> from.find(slice)) - ) + from.varToCell.foreach { (pos, varMap) => + varMap.foreach { (variable, slice) => + if (from.find(slice).node.equals(this)) { + if (to.varToCell.contains(pos)) { + to.varToCell(pos)(variable) = from.find(slice) + } else { + to.varToCell(pos) = mutable.Map(variable -> from.find(slice)) + } + } + } + } + from.formals.foreach { (variable, slice) => + if (from.find(slice).node.equals(this)) { + if (to.varToCell.contains(from.proc)) { + to.varToCell(from.proc)(variable) = from.find(slice) + } else { + to.varToCell(from.proc) = 
mutable.Map(variable -> from.find(slice)) } - ) - from.formals.foreach{ - case (variable: Variable, slice: Slice) => - if from.find(slice).node.equals(this) then - to.varToCell.update( - from.proc, - to.varToCell.getOrElseUpdate(from.proc, - mutable.Map[Variable, Slice]()) ++ Map(variable -> from.find(slice)) - ) + } } - - cells.foreach { - case (offset: BigInt, cell: DSC) => - if cell.pointee.isDefined then + cells.values.foreach { cell => + if (cell.pointee.isDefined) { val pointee = cell.getPointee pointee.node.cloneNode(from, to) -// to.pointTo.update(cell, pointee) TODO check this is not necessary + // to.pointTo.update(cell, pointee) TODO check this is not necessary + } } + } + } - override def equals(obj: Any): Boolean = + override def equals(obj: Any): Boolean = { obj match case node: DSN => this.id == node.id case _ => false + } override def hashCode(): Int = id override def toString: String = s"Node($id, $allocationRegions ${if collapsed then ", collapsed" else ""})" - } /** @@ -910,21 +870,19 @@ class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCount * @param node the node this cell belongs to * @param offset the offset of the cell */ -class DSC(val node: Option[DSN], val offset: BigInt) -{ +class DSC(val node: Option[DSN], val offset: BigInt) { var largestAccessedSize: Int = 0 // the cell's pointee - var pointee : Option[Slice] = None + var pointee: Option[Slice] = None // returns the cell's pointee if it has one. // if not it will create a placeholder, set it as the pointee of this cell and return it - def getPointee : Slice = + def getPointee: Slice = if pointee.isEmpty then val node = DSN(Some(this.node.get.graph.get)) pointee = Some(Slice(node.cells(0), 0)) else - val graph = pointee.get.node.graph.get val resolvedPointee = graph.find(graph.adjust(pointee.get)) @@ -937,13 +895,11 @@ class DSC(val node: Option[DSN], val offset: BigInt) true else false - override def equals(obj: Any): Boolean = obj match - case cell:DSC => this.node.equals(cell.node) && this.offset.equals(cell.offset) + case cell: DSC => this.node.equals(cell.node) && this.offset.equals(cell.offset) case _ => false - override def toString: String = s"Cell(${if node.isDefined then node.get.toString else "NONE"}, $offset)" } @@ -962,7 +918,7 @@ case class Slice(cell: DSC, internalOffset: BigInt) { * @param graph caller's DSG */ class CallSite(val call: DirectCall, val graph: DSG) { - val proc = call.target + val proc: Procedure = call.target val paramCells: mutable.Map[Variable, Slice] = graph.params(proc).foldLeft(mutable.Map[Variable, Slice]()) { (m, reg) => val node = DSN(Some(graph)) @@ -977,13 +933,11 @@ class CallSite(val call: DirectCall, val graph: DSG) { } } - - case class DSAGlobal(addressRange: AddressRange, field: Field) { - def start: BigInt = addressRange.start - def end: BigInt = addressRange.end - def node: DSN = field.node - def offset: BigInt = field.offset + lazy val start: BigInt = addressRange.start + lazy val end: BigInt = addressRange.end + lazy val node: DSN = field.node + lazy val offset: BigInt = field.offset } // global address range @@ -1002,8 +956,8 @@ def unwrapPaddingAndSlicing(expr: Expr): Expr = case BinaryExpr(op, arg1, arg2) => BinaryExpr(op, unwrapPaddingAndSlicing(arg1), unwrapPaddingAndSlicing(arg2)) case MemoryLoad(mem, index, endian, size) => MemoryLoad(mem, unwrapPaddingAndSlicing(index), endian, size) case variable: Variable => variable - case Extract(end, start, body) /*if start == 0 && end == 32*/ => unwrapPaddingAndSlicing(body) // this may 
make it unsound - case ZeroExtend(extension, body) => unwrapPaddingAndSlicing(body) + case Extract(_, _, body) /*if start == 0 && end == 32*/ => unwrapPaddingAndSlicing(body) // this may make it unsound + case ZeroExtend(_, body) => unwrapPaddingAndSlicing(body) case _ => expr diff --git a/src/main/scala/analysis/LocalDSA.scala b/src/main/scala/analysis/LocalDSA.scala index a5fc667a7..cbb0ed368 100644 --- a/src/main/scala/analysis/LocalDSA.scala +++ b/src/main/scala/analysis/LocalDSA.scala @@ -29,8 +29,8 @@ class LocalDSA( externalFunctions: Set[ExternalFunction], reachingDefs: Map[CFGPosition, Map[Variable, Set[CFGPosition]]], writesTo: Map[Procedure, Set[Register]], - params: Map[Procedure, Set[Variable]] - ) extends Analysis[Any]{ + params: Map[Procedure, Set[Variable]] + ) extends Analysis[Any] { private val mallocRegister = Register("R0", 64) private val stackPointer = Register("R31", 64) @@ -38,59 +38,55 @@ class LocalDSA( // set of cfg positions already processed by the analysis local phase private val visited: mutable.Set[CFGPosition] = mutable.Set() - - // variables to symbolic access map for each cfg position - val varToSym: Map[CFGPosition, Map[Variable, Set[SymbolicAddress]]] = symResults.foldLeft(Map[CFGPosition, Map[Variable, Set[SymbolicAddress]]]()) { - (outerMap, syms) => - val position = syms._1 - val innerMap = syms._2.foldLeft(Map[Variable, Set[SymbolicAddress]]()) { - (m, access) => - if m.contains(access._1.accessor) then - // every variable pointing to a stack region ONLY has one symbolic access associated with it. -// m(access._1.accessor).foreach( -// sym => -// if (sym.symbolicBase.isInstanceOf[StackLocation]) then -// println(m) -// println(access._1.accessor) -// println(access) -// print("") -// //assert(!sym.symbolicBase.isInstanceOf[StackLocation]) -// ) -// assert(!access._1.symbolicBase.isInstanceOf[StackLocation]) - m + (access._1.accessor -> (m(access._1.accessor) + access._1)) - else - m + (access._1.accessor -> Set(access._1)) + val varToSym: Map[CFGPosition, Map[Variable, Set[SymbolicAddress]]] = symResults.map { (position, innerMap) => + val newMap = innerMap.keys.foldLeft(Map[Variable, Set[SymbolicAddress]]()) { (m, access) => + if (m.contains(access.accessor)) { + // every variable pointing to a stack region ONLY has one symbolic access associated with it. 
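Editor's aside: the accessor fold above is essentially a groupBy over the accessor variable. A minimal equivalent sketch (an illustration only, assuming the keys of innerMap can be materialised as a set):

    // group each symbolic access by the variable that performs the access
    val grouped: Map[Variable, Set[SymbolicAddress]] =
      innerMap.keys.toSet.groupBy(_.accessor)
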
+ // m(access.accessor).foreach( + // sym => + // if (sym.symbolicBase.isInstanceOf[StackLocation]) then + // println(m) + // println(access.accessor) + // println(access) + // print("") + // //assert(!sym.symbolicBase.isInstanceOf[StackLocation]) + // ) + // assert(!access.symbolicBase.isInstanceOf[StackLocation]) + m + (access.accessor -> (m(access.accessor) + access)) + } else { + m + (access.accessor -> Set(access)) } - outerMap + (position -> innerMap) + } + position -> newMap } - private def getStack(offset: BigInt): DSC = + private def getStack(offset: BigInt): DSC = { var last: BigInt = 0 if graph.stackMapping.contains(offset) then graph.stackMapping(offset).cells(0) else breakable { - graph.stackMapping.keys.foreach( + graph.stackMapping.keys.foreach { elementOffset => if offset < elementOffset then break else last = elementOffset - ) + } } val diff = offset - last assert(graph.stackMapping.contains(last)) graph.stackMapping(last).getCell(diff) - + } /** * if an expr is the address of a stack location return its corresponding cell * @param pos IL position where the expression is used */ - def isStack(expr: Expr, pos: CFGPosition): Option[DSC] = + private def isStack(expr: Expr, pos: CFGPosition): Option[DSC] = { expr match - case BinaryExpr(op, arg1: Variable, arg2) if varToSym.contains(pos) && varToSym(pos).contains(arg1) && + case BinaryExpr(_, arg1: Variable, arg2) if varToSym.contains(pos) && varToSym(pos).contains(arg1) && varToSym(pos)(arg1).exists(s => s.symbolicBase.isInstanceOf[StackLocation]) => evaluateExpression(arg2, constProp(pos)) match case Some(v) => @@ -110,7 +106,7 @@ class LocalDSA( } Some(res) case _ => None - + } var mallocCount: Int = 0 @@ -120,13 +116,12 @@ class LocalDSA( } val graph: DSG = DSG(proc, constProp, varToSym, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params) - - + /** * if an expr is the address of a global location return its corresponding cell * @param pos IL position where the expression is used */ - def isGlobal(expr: Expr, pos: CFGPosition, size: Int = 0): Option[DSC] = + def isGlobal(expr: Expr, pos: CFGPosition, size: Int = 0): Option[DSC] = { val value = evaluateExpression(expr, constProp(pos)) if value.isDefined then val global = graph.isGlobal(value.get.value) @@ -144,7 +139,7 @@ class LocalDSA( None else None - + } /** * Handles unification for instructions of the form R_x = R_y [+ offset] where R_y is a pointer and [+ offset] is optional @@ -157,7 +152,7 @@ class LocalDSA( * @param offset offset if [+ offset] is present * @return the cell resulting from the unification */ - private def visitPointerArithmeticOperation(position: CFGPosition, lhs: DSC, rhs: Variable, size: Int, pointee: Boolean = false, offset: BigInt = 0, collapse: Boolean = false) : DSC = + private def visitPointerArithmeticOperation(position: CFGPosition, lhs: DSC, rhs: Variable, size: Int, pointee: Boolean = false, offset: BigInt = 0, collapse: Boolean = false): DSC = // visit all the defining pointer operation on rhs variable first reachingDefs(position)(rhs).foreach(visit) // get the cells of all the SSA variables in the set @@ -168,8 +163,8 @@ class LocalDSA( // merge the cells or their pointees with lhs var result = cells.foldLeft(lhs) { (c, t) => - val cell = t._1 - val internalOffset = t._2 + val cell = t.cell + val internalOffset = t.internalOffset if !collapse then // offset != 0 then // it's R_x = R_y + offset val node = cell.node.get // get the node of R_y var field = offset + cell.offset + internalOffset // calculate the total 
offset @@ -189,19 +184,18 @@ class LocalDSA( } if pointee then - cells.foreach( - t => - val offset = t._1.offset - val internalOffset = t._2 - val node = t._1.node.get - val cell = graph.find(node.getCell(offset + internalOffset)) - if cell.pointee.isDefined && graph.find(cell.getPointee._1).equals(result) then - graph.selfCollapse(node) -// assert(graph.pointTo.contains(node.getCell(offset))) TODO - result = graph.find(graph.find(node.getCell(offset)).getPointee._1) - else - graph.selfCollapse(node) - ) + cells.foreach { t => + val offset = t.cell.offset + val internalOffset = t.internalOffset + val node = t.cell.node.get + val cell = graph.find(node.getCell(offset + internalOffset)) + if cell.pointee.isDefined && graph.find(cell.getPointee.cell).equals(result) then + graph.selfCollapse(node) + // assert(graph.pointTo.contains(node.getCell(offset))) TODO + result = graph.find(graph.find(node.getCell(offset)).getPointee.cell) + else + graph.selfCollapse(node) + } val resultOffset = result.offset graph.selfCollapse(result.node.get) result.node.get.getCell(result.offset) @@ -209,14 +203,14 @@ class LocalDSA( /** * handles unsupported pointer arithmetic by collapsing all the nodes invloved */ - def unsupportedPointerArithmeticOperation(n: CFGPosition, expr: Expr, lhsCell: DSC): DSC = { + private def unsupportedPointerArithmeticOperation(n: CFGPosition, expr: Expr, lhsCell: DSC): DSC = { val cell = expr.variables.foldLeft(lhsCell) { (c, v) => val cells: Set[Slice] = graph.getCells(n, v) cells.foldLeft(c) { (c, p) => - graph.mergeCells(c, p._1) + graph.mergeCells(c, p.cell) } } @@ -240,21 +234,19 @@ class LocalDSA( val node = DSN(Some(graph), size) node.allocationRegions.add(HeapLocation(nextMallocCount, target, size)) node.flags.heap = true - graph.mergeCells(graph.varToCell(n)(mallocRegister)._1, node.cells(0)) + graph.mergeCells(graph.varToCell(n)(mallocRegister).cell, node.cells(0)) case call: DirectCall if params.contains(call.target) => // Rx, Ry, ... 
Rn = FunctionCall() // create call sites for the callees val cs = CallSite(call, graph) graph.callsites.add(cs) - cs.paramCells.foreach{ - case (variable: Variable, slice: Slice) => - visitPointerArithmeticOperation(call, graph.adjust(slice), variable, 0) + cs.paramCells.foreach { (variable, slice) => + visitPointerArithmeticOperation(call, graph.adjust(slice), variable, 0) } - cs.returnCells.foreach{ - case (variable: Variable, slice: Slice) => - val returnArgument = graph.varToCell(n)(variable) - graph.mergeCells(graph.adjust(returnArgument), graph.adjust(slice)) + cs.returnCells.foreach { (variable, slice) => + val returnArgument = graph.varToCell(n)(variable) + graph.mergeCells(graph.adjust(returnArgument), graph.adjust(slice)) } - case Assign(variable: Variable, rhs: Expr, maybeString) => + case Assign(variable, rhs, _) => val expr: Expr = unwrapPaddingAndSlicing(rhs) val lhsCell = graph.adjust(graph.varToCell(n)(variable)) var global = isGlobal(rhs, n) @@ -270,7 +262,6 @@ class LocalDSA( if op.equals(BVADD) && arg1.equals(stackPointer) && arg2Offset.isDefined && isNegative(arg2Offset.get) then () // the stack is handled prior to this - else if /*varToSym.contains(n) && varToSym(n).contains(arg1) && */ arg2Offset.isDefined then // merge lhs with cell(s) corresponding to (arg1 + arg2) where arg1 is cell and arg2 is an offset val offset = evaluateExpression(arg2, constProp(n)).get.value @@ -282,7 +273,7 @@ class LocalDSA( case arg: Variable /*if varToSym.contains(n) && varToSym(n).contains(arg)*/ => visitPointerArithmeticOperation(n, lhsCell, arg, 0) - case MemoryLoad(mem, index, endian, size) => // Rx = Mem[Ry], merge Rx and pointee of Ry (E(Ry)) + case MemoryLoad(_, index, _, size) => // Rx = Mem[Ry], merge Rx and pointee of Ry (E(Ry)) assert(size % 8 == 0) val byteSize = size/8 lhsCell.node.get.flags.read = true @@ -312,64 +303,67 @@ class LocalDSA( case _ => unsupportedPointerArithmeticOperation(n, expr, lhsCell) - case MemoryAssign(memory, ind: Expr, expr: Expr, endian: Endian, size: Int, label) if unwrapPaddingAndSlicing(expr).isInstanceOf[Variable] => // if value is a literal ignore it - // Mem[Ry] = Rx - val value: Variable = unwrapPaddingAndSlicing(expr).asInstanceOf[Variable] - val index: Expr = unwrapPaddingAndSlicing(ind) - reachingDefs(n)(value).foreach(visit) - assert(size % 8 == 0) - val byteSize = size / 8 - val global = isGlobal(index, n, byteSize) - val stack = isStack(index, n) - val addressPointee: DSC = - if global.isDefined then - graph.adjust(graph.find(global.get).getPointee) - else if stack.isDefined then - graph.adjust(graph.find(stack.get).getPointee) - else - index match - case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) => - evaluateExpression(arg2, constProp(n)) match - case Some(v) => -// assert(varToSym(n).contains(arg1)) - val offset = v.value - visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg1, byteSize, true, offset) - case None => -// assert(varToSym(n).contains(arg1)) - // collapse the results - // visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg1, byteSize, true, 0, true) - unsupportedPointerArithmeticOperation(n, index,DSN(Some(graph)).cells(0)) - case arg: Variable => -// assert(varToSym(n).contains(arg)) - visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg, byteSize, true) - case _ => - ??? 
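Editor's aside: the shape of store the new MemoryAssign case targets is sketched below. This is an illustration, not part of the patch; mem, R1 and R6 are constructed as in the DSATest examples later in this series, and the constructor signatures are taken from the surrounding diff rather than verified against the full codebase.

    // Sketch: Mem[R6 + 4] := R1
    // after the local phase, R1's cell should be merged with the pointee of (cell of R6, internal offset 4)
    val mem = SharedMemory("mem", 64, 8)
    val R1 = Register("R1", 64)
    val R6 = Register("R6", 64)
    val store = MemoryAssign(mem, BinaryExpr(BVADD, R6, BitVecLiteral(4, 64)), R1, Endian.BigEndian, 64, Some("00003"))
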
- - addressPointee.node.get.flags.modified = true - val valueCells = graph.getCells(n, value) - val result = valueCells.foldLeft(addressPointee) { - (c, slice) => - graph.mergeCells(graph.adjust(slice), c) + case MemoryAssign(_, ind, expr, _, size, _) => + val unwrapped = unwrapPaddingAndSlicing(expr) + unwrapped match { + // Mem[Ry] = Rx + case value: Variable => + val index: Expr = unwrapPaddingAndSlicing(ind) + reachingDefs(n)(value).foreach(visit) + assert(size % 8 == 0) + val byteSize = size / 8 + val global = isGlobal(index, n, byteSize) + val stack = isStack(index, n) + val addressPointee: DSC = + if global.isDefined then + graph.adjust(graph.find(global.get).getPointee) + else if stack.isDefined then + graph.adjust(graph.find(stack.get).getPointee) + else + index match + case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) => + evaluateExpression(arg2, constProp(n)) match + case Some(v) => + // assert(varToSym(n).contains(arg1)) + val offset = v.value + visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg1, byteSize, true, offset) + case None => + // assert(varToSym(n).contains(arg1)) + // collapse the results + // visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg1, byteSize, true, 0, true) + unsupportedPointerArithmeticOperation(n, index, DSN(Some(graph)).cells(0)) + case arg: Variable => + // assert(varToSym(n).contains(arg)) + visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg, byteSize, true) + case _ => + ??? + + addressPointee.node.get.flags.modified = true + val valueCells = graph.getCells(n, value) + val result = valueCells.foldLeft(addressPointee) { (c, slice) => + graph.mergeCells(graph.adjust(slice), c) + } + case _ => // if value is a literal ignore it } - case _ => writeToFile(graph.toDot, "test.dot") } - def analyze(): DSG = + def analyze(): DSG = { val domain = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString) domain.foreach(visit) val b = graph.solver.solution() - graph.collectNodes -// graph.nodes.foreach(node => -// node.children.foreach( -// child => -// assert(graph.solver.find(child._1.term).equals(graph.solver.find(node.term))) -// assert(graph.solver.find(child._1.term)._2.equals(child._2)) -// -// ) -// ) + graph.collectNodes() + // graph.nodes.foreach(node => + // node.children.foreach( + // child => + // assert(graph.solver.find(child._1.term).equals(graph.solver.find(node.term))) + // assert(graph.solver.find(child._1.term)._2.equals(child._2)) + // + // ) + // ) graph + } } diff --git a/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala b/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala index 47592f082..59a0fb9f5 100644 --- a/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala +++ b/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala @@ -3,20 +3,19 @@ package analysis import ir.* import analysis.solvers.SimpleWorklistFixpointSolver -case class ReachingDefinitionsAnalysis(program: Program) { +type TupleElement = + TupleLattice[MapLattice[Variable, Set[Assign], PowersetLattice[Assign]], MapLattice[Variable, Set[Assign], PowersetLattice[Assign]], Map[Variable, Set[Assign]], Map[Variable, Set[Assign]]] - type Definition = Assign // local assign is a definition because it is a statement and statements are assumed to be unique - type TupleElement = - TupleLattice[MapLattice[Variable, Set[Definition], PowersetLattice[Definition]], MapLattice[Variable, Set[Definition], PowersetLattice[Definition]], Map[Variable, Set[Definition]], Map[Variable, 
Set[Definition]]] +trait ReachingDefinitionsAnalysis(program: Program) { - val tupleLattice: TupleLattice[MapLattice[Variable, Set[Definition], PowersetLattice[Definition]], MapLattice[Variable, Set[Assign], PowersetLattice[ - Assign]], Map[Variable, Set[Definition]], Map[Variable, Set[Definition]]] = - new TupleLattice( - new MapLattice[Variable, Set[Definition], PowersetLattice[Definition]](new PowersetLattice[Definition]()), - new MapLattice[Variable, Set[Definition], PowersetLattice[Definition]](new PowersetLattice[Definition]()) + private val tupleLattice: TupleLattice[MapLattice[Variable, Set[Assign], PowersetLattice[Assign]], MapLattice[Variable, Set[Assign], PowersetLattice[ + Assign]], Map[Variable, Set[Assign]], Map[Variable, Set[Assign]]] = + TupleLattice( + MapLattice[Variable, Set[Assign], PowersetLattice[Assign]](PowersetLattice[Assign]()), + MapLattice[Variable, Set[Assign], PowersetLattice[Assign]](PowersetLattice[Assign]()) ) - val lattice: MapLattice[CFGPosition, (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]]), TupleElement] = MapLattice( + val lattice: MapLattice[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]]), TupleElement] = MapLattice( tupleLattice ) @@ -25,44 +24,42 @@ case class ReachingDefinitionsAnalysis(program: Program) { /* * Good enough as stmts are unique */ - private def generateUniqueDefinition( - variable: Variable - ): Assign = { + private def generateUniqueDefinition(variable: Variable): Assign = { Assign(variable, BitVecLiteral(0, 0)) } - def transfer(n: CFGPosition, s: (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]])): (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]]) = + def transfer(n: CFGPosition, s: (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])): (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]]) = localTransfer(n, s) def localTransfer( n: CFGPosition, - s: (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]]) - ): (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]]) = n match { + s: (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]]) + ): (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]]) = n match { case cmd: Command => eval(cmd, s) case _ => s } - def transformUses(vars: Set[Variable], s: (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]])): (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]]) = { - vars.foldLeft((s._1, Map.empty[Variable, Set[Definition]])) { + private def transformUses(vars: Set[Variable], s: (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])): (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]]) = { + vars.foldLeft((s(0), Map.empty[Variable, Set[Assign]])) { case ((state, acc), v) => (state, acc + (v -> state(v))) } } - def eval(cmd: Command, s: (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]]) - ): (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]]) = cmd match { + def eval(cmd: Command, s: (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])): + (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]]) = cmd match { case assign: Assign => // do the rhs first (should reset the values for this node to the empty set) // for each variable in the rhs, find the definitions from the lattice lhs and add them to the lattice rhs // for lhs, addOrReplace the definition val rhs = assign.rhs.variables val lhs = assign.lhs - val rhsUseDefs: Map[Variable, Set[Definition]] = rhs.foldLeft(Map.empty[Variable, Set[Definition]]) { + val rhsUseDefs: 
Map[Variable, Set[Assign]] = rhs.foldLeft(Map.empty[Variable, Set[Assign]]) { case (acc, v) => - acc + (v -> s._1(v)) + acc + (v -> s(0)(v)) } - (s._1 + (lhs -> Set(assign)), rhsUseDefs) + (s(0) + (lhs -> Set(assign)), rhsUseDefs) case assert: Assert => transformUses(assert.body.variables, s) case memoryAssign: MemoryAssign => @@ -77,5 +74,5 @@ case class ReachingDefinitionsAnalysis(program: Program) { class ReachingDefinitionsAnalysisSolver(program: Program) extends ReachingDefinitionsAnalysis(program) - with SimpleWorklistFixpointSolver[CFGPosition, (Map[Variable, Set[ReachingDefinitionsAnalysis#Definition]], Map[Variable, Set[ReachingDefinitionsAnalysis#Definition]]), ReachingDefinitionsAnalysis#TupleElement] - with IRIntraproceduralForwardDependencies + with SimpleWorklistFixpointSolver[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]]), TupleElement] + with IRIntraproceduralForwardDependencies \ No newline at end of file diff --git a/src/main/scala/analysis/SymbolicAddressAnalysis.scala b/src/main/scala/analysis/SymbolicAddressAnalysis.scala index 0a40428d0..3e5b2f291 100644 --- a/src/main/scala/analysis/SymbolicAddressAnalysis.scala +++ b/src/main/scala/analysis/SymbolicAddressAnalysis.scala @@ -43,8 +43,6 @@ case class UnknownLocation(override val regionIdentifier: String, proc: Procedur trait SymbolicAddressFunctions(constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]]) extends ForwardIDEAnalysis[SymbolicAddress, TwoElement, TwoElementLattice] { private val stackPointer = Register("R31", 64) - private val linkRegister = Register("R30", 64) - private val framePointer = Register("R29", 64) private val mallocVariable = Register("R0", 64) var mallocCount: Int = 0 @@ -63,12 +61,11 @@ trait SymbolicAddressFunctions(constProp: Map[CFGPosition, Map[Variable, FlatEle val edgelattice: EdgeFunctionLattice[TwoElement, TwoElementLattice] = EdgeFunctionLattice(valuelattice) import edgelattice.{IdEdge, ConstEdge} - def edgesCallToEntry(call: DirectCall, entry: Procedure)(d: DL): Map[DL, EdgeFunction[TwoElement]] = d match case Left(value) => value.symbolicBase match - case StackLocation(regionIdentifier, parent, size) => Map() + case _: StackLocation => Map() case _ => Map(d -> IdEdge()) case Right(_) => Map(d -> IdEdge()) @@ -78,8 +75,7 @@ trait SymbolicAddressFunctions(constProp: Map[CFGPosition, Map[Variable, FlatEle value.symbolicBase match case _: StackLocation => Map() case _ => - if value.accessor.name == "R29" then - Map() + if value.accessor.name == "R29" then Map() else Map(d -> IdEdge()) case Right(_) => Map(d -> IdEdge()) @@ -102,7 +98,7 @@ trait SymbolicAddressFunctions(constProp: Map[CFGPosition, Map[Variable, FlatEle if op.equals(BVADD) && arg1.equals(stackPointer) && isNegative(v) then d match case Left(value) if value.accessor == variable => Map() - case Left(value) => Map(d -> IdEdge()) + case Left(_) => Map(d -> IdEdge()) case Right(_) => val size = bv2SignedInt(v) Map(d -> IdEdge(), Left(SymbolicAddress(variable, StackLocation(s"Stack_${procedure(n).name}", procedure(n), -size), 0)) -> ConstEdge(TwoElementTop)) @@ -128,10 +124,10 @@ trait SymbolicAddressFunctions(constProp: Map[CFGPosition, Map[Variable, FlatEle result case Left(value) if value.accessor == variable => Map() case _ => Map(d -> IdEdge()) - case MemoryLoad(mem, index, endian, size) => + case _: MemoryLoad => d match case Left(value) if value.accessor == variable => Map() - case Left(value) => Map(d -> IdEdge()) + case Left(_) => Map(d -> IdEdge()) case Right(_) => Map(d -> 
IdEdge(), Left(SymbolicAddress(variable, UnknownLocation(nextunknownCount, procedure(n)), 0)) -> ConstEdge(TwoElementTop)) case _ => d match @@ -140,8 +136,8 @@ trait SymbolicAddressFunctions(constProp: Map[CFGPosition, Map[Variable, FlatEle case DirectCall(target, _) if target.name == "malloc" => d match case Left(value) if value.accessor == mallocVariable => Map() - case Left(value) => Map(d -> IdEdge()) - case Right(value) => + case Left(_) => Map(d -> IdEdge()) + case Right(_) => val size: BigInt = evaluateExpression(mallocVariable, constProp(n)) match case Some(value) => value.value case None => -1 diff --git a/src/main/scala/analysis/solvers/DSAUnionFindSolver.scala b/src/main/scala/analysis/solvers/DSAUnionFindSolver.scala index d4b24b0c6..24ed1b259 100644 --- a/src/main/scala/analysis/solvers/DSAUnionFindSolver.scala +++ b/src/main/scala/analysis/solvers/DSAUnionFindSolver.scala @@ -5,10 +5,8 @@ import analysis.DSN import scala.collection.mutable class DSAUnionFindSolver extends UnionFindSolver[UniTerm] { - - - val parent = mutable.Map[DSAUniTerm, DSAUniTerm]() - val offsets = mutable.Map[DSAUniTerm, BigInt]() + private val parent = mutable.Map[DSAUniTerm, DSAUniTerm]() + private val offsets = mutable.Map[DSAUniTerm, BigInt]() override def unify(t1: Term[UniTerm], t2: Term[UniTerm]): Unit = unify(t1.asInstanceOf[DSAUniTerm], t2.asInstanceOf[DSAUniTerm], 0) @@ -17,41 +15,38 @@ class DSAUnionFindSolver extends UnionFindSolver[UniTerm] { def unify(t1: DSAUniTerm, t2: DSAUniTerm, offset: BigInt): Unit = { mkSet(t1) mkSet(t2) - val rep1 = findWithOffset(t1)._1 - val rep2 = findWithOffset(t2)._1 - - if (rep1 == rep2) return - - mkUnion(t1, t2, offset) + val (rep1, _) = findWithOffset(t1) + val (rep2, _) = findWithOffset(t2) + + if (rep1 != rep2) { + /** Perform the union of the equivalence classes of `t1` and `t2`, such that `t2` becomes the new canonical element. + * We assume `t1` and `t2` to be distinct canonical elements. This implementation does not use + * [[https://en.wikipedia.org/wiki/Disjoint-set_data_structure union-by-rank]]. + */ + parent += t1 -> t2 + offsets += t1 -> offset + } } - - def findWithOffset(t: DSAUniTerm): (DSAUniTerm, BigInt) = { mkSet(t) if (parent(t) != t) val (par, offset) = findWithOffset(parent(t)) parent += t -> par - offsets += t -> offsets(t).+(offset) + offsets += t -> (offsets(t) + offset) (parent(t), offsets(t)) } - /** Perform the union of the equivalence classes of `t1` and `t2`, such that `t2` becomes the new canonical element. - * We assume `t1` and `t2` to be distinct canonical elements. This implementation does not use - * [[https://en.wikipedia.org/wiki/Disjoint-set_data_structure union-by-rank]]. - */ - private def mkUnion(t1: DSAUniTerm, t2: DSAUniTerm, offset: BigInt): Unit = - parent += t1 -> t2 - offsets += t1 -> offset /** Creates an equivalence class for the term `t`, if it does not exists already. */ - private def mkSet(t: DSAUniTerm): Unit = + private def mkSet(t: DSAUniTerm): Unit = { if (!parent.contains(t)) parent += (t -> t) offsets += (t -> 0) - + } + } /** Terms used in unification. 
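Editor's aside: a behavioural sketch of the offset-tracking union-find refactored above, assuming DSN(None) is acceptable as a free-standing node for illustration:

    val solver = DSAUnionFindSolver()
    val a = DSAUniTerm(DSN(None))
    val b = DSAUniTerm(DSN(None))
    solver.unify(a, b, 8)                     // fold a's class into b's, displaced by 8
    val (rep, off) = solver.findWithOffset(a)
    assert(rep == b && off == 8)              // b is canonical; a sits 8 bytes into it

Repeated findWithOffset calls compress the path while accumulating offsets, so after chained unifications each term reports its total displacement from the canonical term.
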
@@ -62,6 +57,6 @@ sealed trait UniTerm */ case class DSAUniTerm(node: DSN) extends Var[UniTerm] { - override def toString: String = s"Term{${node}}" + override def toString: String = s"Term{$node}" } diff --git a/src/main/scala/analysis/solvers/UnionFindSolver.scala b/src/main/scala/analysis/solvers/UnionFindSolver.scala index bde8ef4e7..efc09b2a2 100644 --- a/src/main/scala/analysis/solvers/UnionFindSolver.scala +++ b/src/main/scala/analysis/solvers/UnionFindSolver.scala @@ -88,7 +88,7 @@ class UnionFindSolver[A] { /** Produces a string representation of the solution. */ override def toString: String = - solution().map(p => s"${p._1} = ${p._2}").mkString("\n") + solution().map((k, v) => s"$k = $v").mkString("\n") } /** Exception thrown in case of unification failure. diff --git a/src/test/scala/DSATest.scala b/src/test/scala/DSATest.scala index 3a4f0393e..ffbd55c3d 100644 --- a/src/test/scala/DSATest.scala +++ b/src/test/scala/DSATest.scala @@ -88,7 +88,7 @@ class DSATest extends AnyFunSuite { assert(dsg.adjust(stack24.getPointee).equals(stack24)) // 00000466, R31 + 32 and R31 + 24 pointees are merged // __stack_chk_guard's pointee is also pointed to by stack40 - assert(dsg.find(dsg.adjust(stack40.getPointee)).equals(dsg.find(dsg.adjust(dsg.find(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69600))._1.cells(0).getPointee)).getPointee)))) + assert(dsg.find(dsg.adjust(stack40.getPointee)).equals(dsg.find(dsg.adjust(dsg.find(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69600)).node.cells(0).getPointee)).getPointee)))) } @@ -97,21 +97,21 @@ class DSATest extends AnyFunSuite { // global mappings // __libc_csu_init relocation - assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124)).node.cells(0)))) // __lib_csu_fini relocation - assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268)).node.cells(0)))) // jumptable relocation - assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24)).node.cells(0)))) // add_two relocation - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36)).node.cells(0)))) // add_six relocation - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24)).node.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36)).node.cells(0)))) // sub_seven relocation - 
assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24)).node.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36)).node.cells(0)))) // main relocation - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76)).node.cells(0)))) // x relocation - assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4)).node.cells(0)))) } test("local jumptable2 callees") { @@ -144,7 +144,7 @@ class DSATest extends AnyFunSuite { // all three load value of x // the analysis doesn't know if x is a pointer or not therefore assumes it is for soundness // arbitrary pointer is used in arithmetic causing collapse - assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.get.collapsed) + assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69652)).node.cells(0)).getPointee).node.get.collapsed) ) } @@ -180,7 +180,7 @@ class DSATest extends AnyFunSuite { assertJumptable2Globals(dsg) // x should not be collapsed in the main function's local graph - assert(!dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)).getPointee.node.collapsed) + assert(!dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4)).node.cells(0)).getPointee.node.collapsed) } @@ -313,7 +313,7 @@ class DSATest extends AnyFunSuite { val mem = SharedMemory("mem", 64, 8) val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) - var program = prog( + val program = prog( proc("main", block("operations", locAssign1, // R6 = R0 + 4 @@ -352,7 +352,7 @@ class DSATest extends AnyFunSuite { val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) val locAssign3 = Assign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(8, 64)), Some("00005")) - var program = prog( + val program = prog( proc("main", block("operations", locAssign1, // R6 = R0 + 4 @@ -379,7 +379,7 @@ class DSATest extends AnyFunSuite { val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) val locAssign3 = Assign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(7, 64)), Some("00005")) - var program = prog( + val program = prog( proc("main", block("operations", locAssign1, @@ -411,7 +411,7 @@ class DSATest extends AnyFunSuite { val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) val locAssign3 = Assign(R5, R7, Some("00005")) - var program = prog( + val program = prog( proc("main", block("operations", // Assign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), @@ -464,7 +464,7 @@ class DSATest extends AnyFunSuite { // all three load value of x // the analysis doesn't know if x is a pointer or not therefore assumes it is for soundness // arbitrary pointer is used in 
arithmetic causing collapse - assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.get.collapsed) + assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69652)).node.cells(0)).getPointee).node.get.collapsed) ) } @@ -503,7 +503,7 @@ class DSATest extends AnyFunSuite { assertJumptable2Globals(dsg) // bu x now should be collapsed since it was collapsed in callees - assert(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)).getPointee.node.collapsed) + assert(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4)).node.cells(0)).getPointee.node.collapsed) } @@ -609,18 +609,18 @@ class DSATest extends AnyFunSuite { // initial global mappings - assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24))._1.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36))._1.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8))._1.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124)).node.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36)).node.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24)).node.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76)).node.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76)).node.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24)).node.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36)).node.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268)).node.cells(0)))) + 
assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24)).node.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36)).node.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4)).node.cells(0)))) // bu - assert(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4))._1.cells(0)).getPointee.node.collapsed) + assert(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4)).node.cells(0)).getPointee.node.collapsed) } @@ -653,7 +653,7 @@ class DSATest extends AnyFunSuite { // all three load value of x // the analysis doesn't know if x is a pointer or not therefore assumes it is for soundness // arbitrary pointer is used in arithmetic causing collapse - assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69652))._1.cells(0)).getPointee).node.get.collapsed) + assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69652)).node.cells(0)).getPointee).node.get.collapsed) ) } From 6e25419dc3152a41c9cb215211cda1e5b65d2c89 Mon Sep 17 00:00:00 2001 From: l-kent Date: Mon, 14 Oct 2024 11:32:58 +1000 Subject: [PATCH 071/104] deprecate DSA test that used removed example, update DSA tests to use newer jumptable2 example, DSA test cleanup --- src/test/scala/DSATest.scala | 370 ++++++++--------------------------- 1 file changed, 86 insertions(+), 284 deletions(-) diff --git a/src/test/scala/DSATest.scala b/src/test/scala/DSATest.scala index ffbd55c3d..6d6a590fc 100644 --- a/src/test/scala/DSATest.scala +++ b/src/test/scala/DSATest.scala @@ -1,10 +1,9 @@ import analysis.{AddressRange, DSC, DSG, DSN, DataLocation, HeapLocation} -import ir.Endian.BigEndian -import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, CFGPosition, DirectCall, Memory, MemoryAssign, MemoryLoad, Program, Register, SharedMemory, cilvisitor, transforms} +import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, CFGPosition, DirectCall, Endian, Memory, MemoryAssign, MemoryLoad, Program, Register, SharedMemory, cilvisitor, transforms} import org.scalatest.funsuite.AnyFunSuite import ir.dsl.* import specification.Specification -import util.{BASILConfig, BoogieGeneratorConfig, ILLoadingConfig, IRContext, RunUtils, StaticAnalysisConfig, StaticAnalysisContext} +import util.{BASILConfig, BASILResult, BoogieGeneratorConfig, ILLoadingConfig, IRContext, RunUtils, StaticAnalysisConfig, StaticAnalysisContext} /** * This is the test suite for testing DSA functionality @@ -30,7 +29,25 @@ class DSATest extends AnyFunSuite { RunUtils.staticAnalysis(StaticAnalysisConfig(), emptyContext) } + def runTest(path: String): BASILResult = { + RunUtils.loadAndTranslate( + BASILConfig( + loading = ILLoadingConfig( + inputFile = path + ".adt", + relfFile = path + ".relf", + specFile = None, + dumpIL = None, + ), + staticAnalysis = Some(StaticAnalysisConfig()), + boogieTranslation = BoogieGeneratorConfig(), + outputPrefix = "boogie_out", + ) + ) + } + // Local DSA tests + /* + TODO - rewrite this test with a new input that is more suitable than the removed example test("basic pointer") { val results = RunUtils.loadAndTranslate( BASILConfig( @@ -50,12 +67,9 @@ class DSATest extends AnyFunSuite { // the dsg of the main procedure after the local phase val dsg = results.analysis.get.locals.get(program.mainProcedure) - - // dsg.formals(R29) is the slice representing formal R29 val R29formal = dsg.adjust(dsg.formals(R29)) - // cells representing the stack at various offsets val stack0 = 
dsg.find(dsg.stackMapping(0).cells(0)) // R31 val stack8 = dsg.find(dsg.stackMapping(8).cells(0)) // R31 + 8 @@ -66,7 +80,6 @@ class DSATest extends AnyFunSuite { assert(dsg.adjust(stack0.getPointee).equals(R29formal)) // R31 points to the frame pointer assert(dsg.adjust(stack8.getPointee).equals(dsg.adjust(dsg.formals(R30)))) // R31 + 8 points to the link register - /* Position 0000044F: tmp1 := R31 + 24 // Ev(tmp1) = new Node(R31 + 24).0 implicit normalisation: tmp2 := R31 + 32 // Ev(tmp2) = new Node(R31 + 32).0 @@ -81,7 +94,6 @@ class DSATest extends AnyFunSuite { which merges make the stack + 24 point to itself */ - // R31 + 32 points to R31 + 24, later set to point to heap but it should point to both ( assert(dsg.adjust(stack32.getPointee).equals(stack24)) assert(stack24.node.get.collapsed) // 00000497 collapses stack24 concatenation is currently unhandled, any objects referenced in an unhandled operation are collapsed @@ -91,79 +103,48 @@ class DSATest extends AnyFunSuite { assert(dsg.find(dsg.adjust(stack40.getPointee)).equals(dsg.find(dsg.adjust(dsg.find(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69600)).node.cells(0).getPointee)).getPointee)))) } + */ // this function asserts universal properties about global objects in Jumptable2 example - def assertJumptable2Globals(dsg: DSG) : Unit = { + def assertJumptable2Globals(dsg: DSG): Unit = { // global mappings - // __libc_csu_init relocation - assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124)).node.cells(0)))) - // __lib_csu_fini relocation - assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268)).node.cells(0)))) - // jumptable relocation - assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24)).node.cells(0)))) + // jump_table relocation + assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69624 + 8)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24)).node.cells(0)))) // add_two relocation - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36)).node.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1940, 1940 + 36)).node.cells(0)))) // add_six relocation - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24)).node.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36)).node.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24)).node.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1976, 1976 + 36)).node.cells(0)))) // sub_seven relocation - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24)).node.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36)).node.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24)).node.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2012, 2012 + 36)).node.cells(0)))) // main relocation - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76)).node.cells(0)))) + 
assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69608 + 8)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2048, 2048 + 76)).node.cells(0)))) // x relocation - assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4)).node.cells(0)))) + assert(dsg.adjust(dsg.globalMapping(AddressRange(69592, 69592 + 8)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4)).node.cells(0)))) } test("local jumptable2 callees") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - + val results = runTest("src/test/indirect_calls/jumptable2/gcc_pic/jumptable2") val program = results.ir.program - // test that all three calles have the same local graph + // test that all three callees have the same local graph val callees = Set("sub_seven", "add_two", "add_six") val procs = program.nameToProcedure - callees.foreach( - callee => - val dsg = results.analysis.get.locals.get(procs(callee)) - assert(dsg.stackMapping.isEmpty) // stack is not used in either callee - assertJumptable2Globals(dsg) // globals should be the same everywhere unused in callees - // x should point to a collapsed object, in all 3 functions - // all three load value of x - // the analysis doesn't know if x is a pointer or not therefore assumes it is for soundness - // arbitrary pointer is used in arithmetic causing collapse - assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69652)).node.cells(0)).getPointee).node.get.collapsed) - ) - + callees.foreach { callee => + val dsg = results.analysis.get.locals.get(procs(callee)) + assert(dsg.stackMapping.isEmpty) // stack is not used in either callee + assertJumptable2Globals(dsg) // globals should be the same everywhere unused in callees + // x should point to a collapsed object, in all 3 functions + // all three load value of x + // the analysis doesn't know if x is a pointer or not therefore assumes it is for soundness + // arbitrary pointer is used in arithmetic causing collapse + assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4)).node.cells(0)).getPointee).node.get.collapsed) + } } test("local jumptable2 main") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - + val results = runTest("src/test/indirect_calls/jumptable2/gcc_pic/jumptable2") val program = results.ir.program val dsg = results.analysis.get.locals.get(program.mainProcedure) @@ -181,25 +162,10 @@ class DSATest extends AnyFunSuite { // x should not be collapsed in the main function's local graph assert(!dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4)).node.cells(0)).getPointee.node.collapsed) - - } test("unsafe pointer arithmetic") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = 
"src/test/dsa/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.adt", - relfFile = "src/test/dsa/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - + val results = runTest("src/test/dsa/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic") val program = results.ir.program val dsg = results.analysis.get.locals.get(program.mainProcedure) @@ -213,19 +179,15 @@ class DSATest extends AnyFunSuite { val stack48 = dsg.adjust(dsg.find(dsg.stackMapping(48).cells(0)).getPointee) val stack56 = dsg.adjust(dsg.find(dsg.stackMapping(56).cells(0)).getPointee) - assert(stack0.equals(dsg.adjust(dsg.formals(R29)))) assert(stack8.equals(dsg.adjust(dsg.formals(R30)))) - - // stack24 and stack32 should point to the beginning of first Malloc (size 20) assert(stack24.equals(stack32)) assert(stack24.offset == 0) assert(stack24.node.get.allocationRegions.size == 1) assert(stack24.node.get.allocationRegions.head.asInstanceOf[HeapLocation].size == 20) - // stack24 and stack40 should be pointing to the same cell at different internal offsets val unadjustedStack24Pointee = dsg.find(dsg.stackMapping(24).cells(0)).getPointee val unadjustedStack40Pointee = dsg.find(dsg.stackMapping(40).cells(0)).getPointee @@ -238,28 +200,13 @@ class DSATest extends AnyFunSuite { assert(stack48.node.get.allocationRegions.size == 1) assert(stack48.node.get.allocationRegions.head.asInstanceOf[HeapLocation].size == 8) - // stack 48 points to a malloc address which point to the pointee of stack40 and stack56 assert(dsg.adjust(stack48.getPointee).equals(stack40)) assert(dsg.adjust(stack48.getPointee).equals(stack56)) - - } test("interproc pointer arithmetic main") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", - relfFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) + val results = runTest("src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic") val program = results.ir.program val dsg = results.analysis.get.locals.get(program.mainProcedure) val stack0 = dsg.adjust(dsg.find(dsg.stackMapping(0).cells(0)).getPointee) @@ -282,19 +229,7 @@ class DSATest extends AnyFunSuite { } test("interproc pointer arithmetic callee") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", - relfFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) + val results = runTest("src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic") val program = results.ir.program val dsg = results.analysis.get.locals.get(program.nameToProcedure("callee")) val stack8 = dsg.adjust(dsg.find(dsg.stackMapping(8).cells(0)).getPointee) @@ -318,8 +253,8 @@ class DSATest extends AnyFunSuite { block("operations", locAssign1, // R6 = R0 + 4 locAssign2, // R7 = R0 + 5 - MemoryAssign(mem, R7, R1, BigEndian, 64, 
Some("00003")), // *R7 = R1, (*R6 + 1) = R1 - MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), // *R6 = R2 + MemoryAssign(mem, R7, R1, Endian.BigEndian, 64, Some("00003")), // *R7 = R1, (*R6 + 1) = R1 + MemoryAssign(mem, R6, R2, Endian.BigEndian, 64, Some("00004")), // *R6 = R2 ret ) ) @@ -343,7 +278,6 @@ class DSATest extends AnyFunSuite { // R6 (or R7)'s pointee should be the same as R1 and R2 assert(dsg.adjust(dsg.varToCell(locAssign1)(R6)).pointee.isDefined) assert(dsg.adjust(dsg.adjust(dsg.varToCell(locAssign1)(R6)).getPointee).equals(dsg.adjust(dsg.formals(R1)))) - } test("offsetting from middle of cell to a new cell") { @@ -357,8 +291,8 @@ class DSATest extends AnyFunSuite { block("operations", locAssign1, // R6 = R0 + 4 locAssign2, // R7 = R0 + 5 - MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), - MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), + MemoryAssign(mem, R7, R1, Endian.BigEndian, 64, Some("00003")), + MemoryAssign(mem, R6, R2, Endian.BigEndian, 64, Some("00004")), locAssign3, // R5 = R7 + 8 ret ) @@ -384,8 +318,8 @@ class DSATest extends AnyFunSuite { block("operations", locAssign1, locAssign2, - MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), - MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), + MemoryAssign(mem, R7, R1, Endian.BigEndian, 64, Some("00003")), + MemoryAssign(mem, R6, R2, Endian.BigEndian, 64, Some("00004")), locAssign3, ret ) @@ -417,8 +351,8 @@ class DSATest extends AnyFunSuite { // Assign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), locAssign1, locAssign2, - MemoryAssign(mem, R7, R1, BigEndian, 64, Some("00003")), - MemoryAssign(mem, R6, R2, BigEndian, 64, Some("00004")), + MemoryAssign(mem, R7, R1, Endian.BigEndian, 64, Some("00003")), + MemoryAssign(mem, R6, R2, Endian.BigEndian, 64, Some("00004")), locAssign3, ret ) @@ -436,56 +370,26 @@ class DSATest extends AnyFunSuite { // this is the same as local graphs // nothing should be changed // TODO count point-to relations and ensure no more constraints are added in this phase - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - + val results = runTest("src/test/indirect_calls/jumptable2/gcc_pic/jumptable2") val program = results.ir.program - // test that all three calles have the same local graph + // test that all three callees have the same local graph val callees = Set("sub_seven", "add_two", "add_six") val procs = program.nameToProcedure - callees.foreach( - callee => - val dsg = results.analysis.get.bus.get(procs(callee)) - assert(dsg.stackMapping.isEmpty) // stack is not used in either callee - assertJumptable2Globals(dsg) // globals should be the same everywhere unused in callees - // x should point to a collapsed object, in all 3 functions - // all three load value of x - // the analysis doesn't know if x is a pointer or not therefore assumes it is for soundness - // arbitrary pointer is used in arithmetic causing collapse - assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69652)).node.cells(0)).getPointee).node.get.collapsed) - ) - + callees.foreach { callee => + val dsg = results.analysis.get.bus.get(procs(callee)) + assert(dsg.stackMapping.isEmpty) // stack is not used in either callee + assertJumptable2Globals(dsg) 
// globals should be the same everywhere unused in callees + // x should point to a collapsed object, in all 3 functions + // all three load value of x + // the analysis doesn't know if x is a pointer or not therefore assumes it is for soundness + // arbitrary pointer is used in arithmetic causing collapse + assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4)).node.cells(0)).getPointee).node.get.collapsed) + } } - test("bottom up jumptable2 main") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - - + val results = runTest("src/test/indirect_calls/jumptable2/gcc_pic/jumptable2") val program = results.ir.program val dsg = results.analysis.get.bus.get(program.mainProcedure) @@ -498,32 +402,16 @@ class DSATest extends AnyFunSuite { assert(dsg.adjust(stack16.getPointee).equals(dsg.adjust(dsg.formals(R1)))) assert(dsg.adjust(stack28.getPointee).equals(dsg.adjust(dsg.formals(R0)))) - // initial global mappings assertJumptable2Globals(dsg) - // bu x now should be collapsed since it was collapsed in callees + // bottom-up x now should be collapsed since it was collapsed in callees assert(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4)).node.cells(0)).getPointee.node.collapsed) - } - - ignore("bottom up interproc pointer arithmetic callee") { // same as interproc pointer arithmetic callee's local graph (no changes should have been made) - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", - relfFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) + val results = runTest("src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic") val program = results.ir.program val dsg = results.analysis.get.bus.get(program.nameToProcedure("callee")) val stack8 = dsg.adjust(dsg.find(dsg.stackMapping(8).cells(0)).getPointee) @@ -532,24 +420,10 @@ class DSATest extends AnyFunSuite { assert(stack8.equals(dsg.adjust(dsg.formals(R0)))) assert(stack8.offset == 0) assert(stack24.equals(dsg.adjust(dsg.formals(R0)).node.get.cells(16))) - } - test("bottom up interproc pointer arithmetic main") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", - relfFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) + val results = runTest("src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic") val program = results.ir.program val dsg = results.analysis.get.bus.get(program.mainProcedure) @@ -559,7 +433,6 @@ class DSATest extends AnyFunSuite { val stack32 = dsg.adjust(dsg.find(dsg.stackMapping(32).cells(0)).getPointee) val stack40 = dsg.adjust(dsg.find(dsg.stackMapping(40).cells(0)).getPointee) - // 
same as the local graph with the difference that stack40 points to cell at // a different of the same node as pointees of stack32 and stack24 assert(stack0.equals(dsg.adjust(dsg.formals(R29)))) @@ -573,27 +446,12 @@ class DSATest extends AnyFunSuite { assert(dsg.find(dsg.stackMapping(40).cells(0)).getPointee.internalOffset == 0) assert(dsg.find(dsg.stackMapping(32).cells(0)).getPointee.internalOffset == 0) assert(dsg.find(dsg.stackMapping(24).cells(0)).getPointee.internalOffset == 0) - } - // top down tests ignore("top down jumptable2 main") { // no changes should be made from previous phase - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - + val results = runTest("src/test/indirect_calls/jumptable2/gcc_pic/jumptable2") val program = results.ir.program val dsg = results.analysis.get.tds.get(program.mainProcedure) // assert(dsg.pointTo.size == 13) // 13 @@ -606,74 +464,34 @@ class DSATest extends AnyFunSuite { assert(dsg.adjust(stack8.getPointee).equals(dsg.adjust(dsg.formals(R30)))) assert(dsg.adjust(stack16.getPointee).equals(dsg.adjust(dsg.formals(R1)))) assert(dsg.adjust(stack28.getPointee).equals(dsg.adjust(dsg.formals(R0)))) + assertJumptable2Globals(dsg) - - // initial global mappings - assert(dsg.adjust(dsg.globalMapping(AddressRange(69600, 69608)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2136, 2136 + 124)).node.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1948, 1948 + 36)).node.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69624, 69632)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69656, 69656 + 24)).node.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76)).node.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69608, 69616)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2056, 2056 + 76)).node.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24)).node.cells(8).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(1984, 1984 + 36)).node.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69560, 69568)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2264, 2268)).node.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69656, 69656 + 24)).node.cells(16).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(2020, 2020 + 36)).node.cells(0)))) - assert(dsg.adjust(dsg.globalMapping(AddressRange(69584, 69584 + 8)).node.cells(0).getPointee).equals(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4)).node.cells(0)))) - - // bu + // bottom-up assert(dsg.find(dsg.globalMapping(AddressRange(69648, 69648 + 4)).node.cells(0)).getPointee.node.collapsed) } ignore("top down jumptable2 callees") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "examples/jumptable2/jumptable2.adt", - relfFile = "examples/jumptable2/jumptable2.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = 
Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) - + val results = runTest("src/test/indirect_calls/jumptable2/gcc_pic/jumptable2") val program = results.ir.program // test that all three callees have the same local graph val callees = Set("sub_seven", "add_two", "add_six") val procs = program.nameToProcedure - callees.foreach( - callee => - val dsg = results.analysis.get.tds.get(procs(callee)) - assert(dsg.stackMapping.isEmpty) // stack is not used in either callee - assertJumptable2Globals(dsg) // globals should be the same everywhere unused in callees - // x should point to a collapsed object, in all 3 functions - // all three load value of x - // the analysis doesn't know if x is a pointer or not therefore assumes it is for soundness - // arbitrary pointer is used in arithmetic causing collapse - assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69652)).node.cells(0)).getPointee).node.get.collapsed) - ) - + callees.foreach { callee => + val dsg = results.analysis.get.tds.get(procs(callee)) + assert(dsg.stackMapping.isEmpty) // stack is not used in either callee + assertJumptable2Globals(dsg) // globals should be the same everywhere unused in callees + // x should point to a collapsed object, in all 3 functions + // all three load value of x + // the analysis doesn't know if x is a pointer or not therefore assumes it is for soundness + // arbitrary pointer is used in arithmetic causing collapse + assert(dsg.adjust(dsg.find(dsg.globalMapping(AddressRange(69648, 69652)).node.cells(0)).getPointee).node.get.collapsed) + } } - - test("top down interproc pointer arithmetic callee") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", - relfFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) + val results = runTest("src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic") val program = results.ir.program val dsg = results.analysis.get.tds.get(program.nameToProcedure("callee")) @@ -684,29 +502,14 @@ class DSATest extends AnyFunSuite { assert(stack8.equals(dsg.adjust(dsg.formals(R0)))) assert(stack8.offset == 16) assert(stack24.equals(dsg.adjust(dsg.formals(R0)).node.get.cells(32))) - } - - // top down phase should be the same as bu phase + // top-down phase should be the same as bottom-up phase ignore("top down interproc pointer arithmetic main") { - val results = RunUtils.loadAndTranslate( - BASILConfig( - loading = ILLoadingConfig( - inputFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.adt", - relfFile = "src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic.relf", - specFile = None, - dumpIL = None, - ), - staticAnalysis = Some(StaticAnalysisConfig()), - boogieTranslation = BoogieGeneratorConfig(), - outputPrefix = "boogie_out", - ) - ) + val results = runTest("src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic") val program = results.ir.program val dsg = results.analysis.get.tds.get(program.mainProcedure) - val stack0 = dsg.adjust(dsg.find(dsg.stackMapping(0).cells(0)).getPointee) val stack8 = dsg.adjust(dsg.find(dsg.stackMapping(8).cells(0)).getPointee) val stack24 = 
dsg.adjust(dsg.find(dsg.stackMapping(24).cells(0)).getPointee) @@ -724,7 +527,6 @@ class DSATest extends AnyFunSuite { assert(dsg.find(dsg.stackMapping(40).cells(0)).getPointee.internalOffset == 0) assert(dsg.find(dsg.stackMapping(32).cells(0)).getPointee.internalOffset == 0) assert(dsg.find(dsg.stackMapping(24).cells(0)).getPointee.internalOffset == 0) - } } From c0cf06405dd0389d223c85642284591ef8bb56d4 Mon Sep 17 00:00:00 2001 From: l-kent Date: Mon, 14 Oct 2024 12:03:06 +1000 Subject: [PATCH 072/104] put DSA in its own package, rename classes for clarity, put DSG/Graph in its own file since it's so big --- .../DataStructureAnalysis.scala} | 60 +-- .../Graph.scala} | 419 ++++-------------- .../LocalPhase.scala} | 55 ++- .../SymbolicAddressAnalysis.scala | 15 +- .../data_structure_analysis/Utility.scala | 253 +++++++++++ .../analysis/solvers/DSAUnionFindSolver.scala | 5 +- src/main/scala/util/RunUtils.scala | 43 +- ....scala => DataStructureAnalysisTest.scala} | 16 +- 8 files changed, 439 insertions(+), 427 deletions(-) rename src/main/scala/analysis/{DSA.scala => data_structure_analysis/DataStructureAnalysis.scala} (80%) rename src/main/scala/analysis/{DSAUtility.scala => data_structure_analysis/Graph.scala} (67%) rename src/main/scala/analysis/{LocalDSA.scala => data_structure_analysis/LocalPhase.scala} (90%) rename src/main/scala/analysis/{ => data_structure_analysis}/SymbolicAddressAnalysis.scala (91%) create mode 100644 src/main/scala/analysis/data_structure_analysis/Utility.scala rename src/test/scala/{DSATest.scala => DataStructureAnalysisTest.scala} (97%) diff --git a/src/main/scala/analysis/DSA.scala b/src/main/scala/analysis/data_structure_analysis/DataStructureAnalysis.scala similarity index 80% rename from src/main/scala/analysis/DSA.scala rename to src/main/scala/analysis/data_structure_analysis/DataStructureAnalysis.scala index 3078ba66c..8f46c4173 100644 --- a/src/main/scala/analysis/DSA.scala +++ b/src/main/scala/analysis/data_structure_analysis/DataStructureAnalysis.scala @@ -1,6 +1,7 @@ -package analysis +package analysis.data_structure_analysis -import ir.{BitVecLiteral, BitVecType, CFGPosition, CallGraph, Procedure, Program, Register, Variable, computeDomain, IRWalk} +import analysis.* +import ir.* import specification.{ExternalFunction, SpecGlobal, SymbolTableEntry} import scala.collection.mutable @@ -19,19 +20,20 @@ import scala.collection.mutable * @param writesTo mapping from procedures to registers they change * @param params mapping from procedures to their parameters */ -class DSA(program: Program, - symResults: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], - constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - globals: Set[SymbolTableEntry], globalOffsets: Map[BigInt, BigInt], - externalFunctions: Set[ExternalFunction], - reachingDefs: Map[CFGPosition, Map[Variable, Set[CFGPosition]]], - writesTo: Map[Procedure, Set[Register]], - params: Map[Procedure, Set[Variable]] - ) extends Analysis[Map[Procedure, DSG]] { - - val locals: mutable.Map[Procedure, DSG] = mutable.Map() - val bu: mutable.Map[Procedure, DSG] = mutable.Map() - val td: mutable.Map[Procedure, DSG] = mutable.Map() +class DataStructureAnalysis(program: Program, + symResults: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], + constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + globals: Set[SymbolTableEntry], + globalOffsets: Map[BigInt, BigInt], + externalFunctions: Set[ExternalFunction], + reachingDefs: Map[CFGPosition, Map[Variable, 
Set[CFGPosition]]], + writesTo: Map[Procedure, Set[Register]], + params: Map[Procedure, Set[Variable]] + ) extends Analysis[Map[Procedure, Graph]] { + + val local: mutable.Map[Procedure, Graph] = mutable.Map() + val bottomUp: mutable.Map[Procedure, Graph] = mutable.Map() + val topDown: mutable.Map[Procedure, Graph] = mutable.Map() private val stackPointer = Register("R31", 64) private val returnPointer = Register("R30", 64) @@ -51,7 +53,7 @@ class DSA(program: Program, private var visited = Set[Procedure]() private val queue = mutable.Queue[Procedure]() - override def analyze(): Map[Procedure, DSG] = { + override def analyze(): Map[Procedure, Graph] = { var domain: Set[Procedure] = Set(program.mainProcedure) val stack: mutable.Stack[Procedure] = mutable.Stack() stack.pushAll(program.mainProcedure.calls) @@ -65,16 +67,16 @@ class DSA(program: Program, // perform local analysis on all procs domain.foreach { proc => - val dsg = LocalDSA(proc, symResults, constProp, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params).analyze() + val dsg = LocalPhase(proc, symResults, constProp, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params).analyze() - locals.update(proc, dsg) - bu.update(proc, dsg.cloneSelf()) + local.update(proc, dsg) + bottomUp.update(proc, dsg.cloneSelf()) } val leafNodes = findLeaf(program.mainProcedure) leafNodes.foreach { proc => - assert(locals(proc).callsites.isEmpty) + assert(local(proc).callsites.isEmpty) visited += proc //val preds: Set[Procedure] = CallGraph.pred(proc) queue.enqueueAll(CallGraph.pred(proc).diff(visited).intersect(domain)) @@ -83,15 +85,15 @@ class DSA(program: Program, // bottom up phase while queue.nonEmpty do var proc = queue.dequeue() - while !locals.contains(proc) && queue.nonEmpty do proc = queue.dequeue() + while !local.contains(proc) && queue.nonEmpty do proc = queue.dequeue() visited += proc - if locals.contains(proc) then + if local.contains(proc) then queue.enqueueAll(CallGraph.pred(proc).diff(visited)) - val buGraph = bu(proc) + val buGraph = bottomUp(proc) buGraph.callsites.foreach { callSite => val callee = callSite.proc - val calleeGraph = locals(callee) //.cloneSelf() + val calleeGraph = local(callee) //.cloneSelf() assert(buGraph.globalMapping.keySet.equals(calleeGraph.globalMapping.keySet)) assert(calleeGraph.formals.keySet.diff(ignoreRegisters).equals(callSite.paramCells.keySet)) calleeGraph.globalMapping.values.foreach { field => @@ -120,7 +122,7 @@ class DSA(program: Program, // assert(calleeGraph.formals.isEmpty || buGraph.varToCell(begin(callee)).equals(calleeGraph.formals)) calleeGraph.globalMapping.foreach { - case (range: AddressRange, Field(node: DSN, offset: BigInt)) => + case (range: AddressRange, Field(node: Node, offset: BigInt)) => val field = calleeGraph.find(node) buGraph.mergeCells( buGraph.globalMapping(range).node.getCell(buGraph.globalMapping(range).offset), @@ -149,7 +151,7 @@ class DSA(program: Program, // bottom up phase finished // clone bu graphs to top-down graphs domain.foreach { proc => - td.update(proc, bu(proc).cloneSelf()) + topDown.update(proc, bottomUp(proc).cloneSelf()) } queue.enqueue(program.mainProcedure) @@ -160,10 +162,10 @@ class DSA(program: Program, val proc = queue.dequeue() visited += proc queue.enqueueAll(CallGraph.succ(proc).diff(visited)) - val callersGraph = td(proc) + val callersGraph = topDown(proc) callersGraph.callsites.foreach { callSite => val callee = callSite.proc - val calleesGraph = td(callee) + val calleesGraph = topDown(callee) 
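      // In this top-down pass the caller pushes its view of the call into the callee:
      // nodes reachable from the caller's globals, actual-parameter cells and return cells are
      // cloned into calleesGraph and unified with the callee's corresponding cells, so callee-side
      // points-to facts are refined by what callers actually pass. Illustratively, for a single
      // actual parameter R0 (a sketch with hypothetical names, not lines taken from this hunk):
      //   val actual = callersGraph.adjust(callSite.paramCells(R0))      // caller's cell for the actual
      //   actual.node.get.cloneNode(callersGraph, calleesGraph)          // copy its node across graphs
      //   calleesGraph.mergeCells(calleesGraph.adjust(calleesGraph.formals(R0)), actual)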
assert(callersGraph.globalMapping.keySet.equals(calleesGraph.globalMapping.keySet)) callersGraph.globalMapping.values.foreach { field => @@ -210,7 +212,7 @@ class DSA(program: Program, } callersGraph.collectNodes() } - td.toMap + topDown.toMap } } diff --git a/src/main/scala/analysis/DSAUtility.scala b/src/main/scala/analysis/data_structure_analysis/Graph.scala similarity index 67% rename from src/main/scala/analysis/DSAUtility.scala rename to src/main/scala/analysis/data_structure_analysis/Graph.scala index 2a7b888dc..e2e40f1c3 100644 --- a/src/main/scala/analysis/DSAUtility.scala +++ b/src/main/scala/analysis/data_structure_analysis/Graph.scala @@ -1,50 +1,45 @@ -package analysis +package analysis.data_structure_analysis -import analysis.solvers.{DSAUniTerm, DSAUnionFindSolver, UnionFindSolver, Var} -import cfg_visualiser.{DotStruct, DotStructElement, StructArrow, StructDotGraph} -import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Expr, Extract, IRWalk, IntraProcIRCursor, Literal, Memory, MemoryAssign, MemoryLoad, Procedure, Register, Repeat, SignExtend, UnaryExpr, Variable, ZeroExtend, computeDomain, toShortString} -import specification.{ExternalFunction, SpecGlobal, SymbolTableEntry} -import util.Logger +import analysis.FlatElement +import analysis.solvers.DSAUnionFindSolver +import analysis.evaluateExpression +import cfg_visualiser.* +import ir.* +import specification.{ExternalFunction, SymbolTableEntry} -import scala.util.control.Breaks.{break, breakable} import scala.collection.mutable import scala.collection.mutable.ArrayBuffer - -object NodeCounter { - private var counter: Int = 0 - - def getCounter: Int = - counter = counter + 1 - counter -} +import scala.util.control.Breaks.{break, breakable} /** - * Data Structure Graph for DSA - * @param proc procedure of DSG - * @param constProp - * @param varToSym mapping flow-sensitive (position sensitive) mapping from registers to their set of symbolic accesses - * @param globals - * @param globalOffsets - * @param externalFunctions - * @param reachingDefs - * @param writesTo - * @param params - */ -class DSG(val proc: Procedure, - constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - varToSym: Map[CFGPosition, Map[Variable, Set[SymbolicAddress]]], - globals: Set[SymbolTableEntry], globalOffsets: Map[BigInt, BigInt], - externalFunctions: Set[ExternalFunction], - val reachingDefs: Map[CFGPosition, Map[Variable, Set[CFGPosition]]], - val writesTo: Map[Procedure, Set[Register]], - val params: Map[Procedure, Set[Variable]] - ) { + * Data Structure Graph for DSA + * + * @param proc procedure of DSG + * @param constProp + * @param varToSym mapping flow-sensitive (position sensitive) mapping from registers to their set of symbolic accesses + * @param globals + * @param globalOffsets + * @param externalFunctions + * @param reachingDefs + * @param writesTo + * @param params + */ +class Graph(val proc: Procedure, + constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + varToSym: Map[CFGPosition, Map[Variable, Set[SymbolicAddress]]], + globals: Set[SymbolTableEntry], + globalOffsets: Map[BigInt, BigInt], + externalFunctions: Set[ExternalFunction], + val reachingDefs: Map[CFGPosition, Map[Variable, Set[CFGPosition]]], + val writesTo: Map[Procedure, Set[Register]], + val params: Map[Procedure, Set[Variable]] + ) { // DSNodes owned by this graph, only updated once analysis is done, - val nodes: mutable.Set[DSN] = mutable.Set() + val nodes: mutable.Set[Node] = mutable.Set() // 
Points-to relations in this graph, only updated once the analysis is done, - val pointsto: mutable.Map[DSC, Slice] = mutable.Map() + val pointsto: mutable.Map[Cell, Slice] = mutable.Map() // represent callees in proc val callsites: mutable.Set[CallSite] = mutable.Set() @@ -111,13 +106,13 @@ class DSG(val proc: Procedure, } // this is the mapping from offsets/positions on the stack to their representative DS nodes - val stackMapping: mutable.Map[BigInt, DSN] = mutable.Map() + val stackMapping: mutable.Map[BigInt, Node] = mutable.Map() private var lastOffset: BigInt = -1 private var nextValidOffset: BigInt = 0 stackAccesses.keys.toSeq.sorted.foreach { offset => val byteSize = stackAccesses(offset) if offset >= nextValidOffset then - val node = DSN(Some(this), byteSize) + val node = Node(Some(this), byteSize) node.allocationRegions.add(StackLocation(s"Stack_${proc}_$offset", proc, byteSize)) node.flags.stack = true node.addCell(0, byteSize) @@ -134,7 +129,7 @@ class DSG(val proc: Procedure, // creates the globals from the symbol tables val globalMapping = mutable.Map[AddressRange, Field]() globals.foreach { global => - val node = DSN(Some(this), global.size) + val node = Node(Some(this), global.size) node.allocationRegions.add(DataLocation(global.name, global.address, global.size / 8)) node.flags.global = true node.flags.incomplete = true @@ -152,7 +147,7 @@ class DSG(val proc: Procedure, break var field: BigInt = 0 - val node: DSN = isGlobal(relocatedAddress) match + val node: Node = isGlobal(relocatedAddress) match case Some(value) => field = relocatedAddress - value.addressRange.start val node = value.field.node @@ -160,7 +155,7 @@ class DSG(val proc: Procedure, node case None => - val node = DSN(Some(this)) + val node = Node(Some(this)) node.allocationRegions.add(DataLocation(s"Relocated_$relocatedAddress", relocatedAddress, 8)) node.flags.global = true node.flags.incomplete = true @@ -174,7 +169,7 @@ class DSG(val proc: Procedure, } externalFunctions.foreach { external => - val node = DSN(Some(this)) + val node = Node(Some(this)) node.allocationRegions.add(DataLocation(external.name, external.offset, 0)) node.flags.global = true node.flags.incomplete = true @@ -204,8 +199,8 @@ class DSG(val proc: Procedure, /** - * collects all the nodes that are currently in the DSG and updates nodes member variable - */ + * collects all the nodes that are currently in the DSG and updates nodes member variable + */ def collectNodes(): Unit = { nodes.clear() pointsto.clear() @@ -216,7 +211,7 @@ class DSG(val proc: Procedure, nodes.addAll(stackMapping.values.map(n => find(n).node)) nodes.addAll(globalMapping.values.map(n => find(n.node).node)) - val queue: mutable.Queue[DSN] = mutable.Queue() + val queue: mutable.Queue[Node] = mutable.Queue() queue.enqueueAll(nodes) while (queue.nonEmpty) { val cur = queue.dequeue() @@ -293,15 +288,15 @@ class DSG(val proc: Procedure, /** - * Collapses the node causing it to lose field sensitivity - */ - def collapseNode(n: DSN): DSN = { + * Collapses the node causing it to lose field sensitivity + */ + def collapseNode(n: Node): Node = { val (term, _) = solver.findWithOffset(n.term) - val node: DSN = term.node + val node: Node = term.node if (!(n.collapsed || find(n).node.collapsed)) { - val collapsedNode: DSN = DSN(n.graph) - val collapsedCell = DSC(Some(collapsedNode), 0) + val collapsedNode: Node = Node(n.graph) + val collapsedCell = Cell(Some(collapsedNode), 0) n.flags.collapsed = true collapsedNode.flags.collapsed = true @@ -349,10 +344,10 @@ class DSG(val proc: 
Procedure, /** - * this function merges all the overlapping cells in the given node - * The node DOESN'T lose field sensitivity after this - */ - def selfCollapse(node: DSN): Unit = { + * this function merges all the overlapping cells in the given node + * The node DOESN'T lose field sensitivity after this + */ + def selfCollapse(node: Node): Unit = { var lastOffset: BigInt = -1 var lastAccess: BigInt = -1 val removed = mutable.Set[BigInt]() @@ -372,9 +367,9 @@ class DSG(val proc: Procedure, } /** - * merges two neighbouring cells into one - */ - private def mergeNeighbours(cell1: DSC, cell2: DSC): DSC = { + * merges two neighbouring cells into one + */ + private def mergeNeighbours(cell1: Cell, cell2: Cell): Cell = { require(cell1.node.equals(cell2.node) && cell1.offset < cell2.offset) if (cell2.pointee.isDefined) { if (cell1.pointee.isDefined) { @@ -396,23 +391,23 @@ class DSG(val proc: Procedure, val solver: DSAUnionFindSolver = DSAUnionFindSolver() /** - * wrapper for find functionality of the union-find - * @param node the node to perform find on - * @return a field which is the tuple (parent node of the input node, starting offset of the input node in its parent) - */ - def find(node: DSN): Field = { + * wrapper for find functionality of the union-find + * @param node the node to perform find on + * @return a field which is the tuple (parent node of the input node, starting offset of the input node in its parent) + */ + def find(node: Node): Field = { val (n, offset) = solver.findWithOffset(node.term) val resultNode = n.node Field(resultNode, offset) } /** - * wrapper for find functionality of the union-find - * - * @param cell the cell to perform find on - * @return the input cell's equivalent cell in the parent - */ - def find(cell: DSC): DSC = { + * wrapper for find functionality of the union-find + * + * @param cell the cell to perform find on + * @return the input cell's equivalent cell in the parent + */ + def find(cell: Cell): Cell = { val node = cell.node.get val parent: Field = find(node) parent.node.addCell(cell.offset + parent.offset, cell.largestAccessedSize) @@ -421,12 +416,12 @@ class DSG(val proc: Procedure, def find(slice: Slice): Slice = deadjust(adjust(slice)) /** - * merges two cells and unifies their nodes - * @param cell1 - * @param cell2 - * @return the resulting cell in the unified node - */ - def mergeCells(c1: DSC, c2: DSC): DSC = { + * merges two cells and unifies their nodes + * @param cell1 + * @param cell2 + * @return the resulting cell in the unified node + */ + def mergeCells(c1: Cell, c2: Cell): Cell = { var cell1 = c1 var cell2 = c2 if c1.node.isDefined then @@ -485,12 +480,12 @@ class DSG(val proc: Procedure, val node2CellsOffset = node2.cells.toSeq.map((offset, cell) => (offset + delta, cell)) - val cells: Seq[(BigInt, DSC)] = (node1.cells.toSeq ++ node2CellsOffset).sortBy(_(0)) + val cells: Seq[(BigInt, Cell)] = (node1.cells.toSeq ++ node2CellsOffset).sortBy(_(0)) var lastOffset: BigInt = -1 var lastAccess: Int = -1 // create a new node to represent the unified node - val resultNode = DSN(Some(this)) + val resultNode = Node(Some(this)) // add nodes flags and regions to the resulting node resultNode.allocationRegions.addAll(node1.allocationRegions ++ node2.allocationRegions) resultNode.flags.join(node1.flags) @@ -511,7 +506,7 @@ class DSG(val proc: Procedure, // compute the cells present in the resulting unified node // a mapping from offsets to the set of old cells which are merged to form a cell in the new unified node // values in the mapping 
also include the largest access size so far computed for each resulting cell - val resultCells = mutable.Map[BigInt, mutable.Set[DSC]]() + val resultCells = mutable.Map[BigInt, mutable.Set[Cell]]() val resultLargestAccesses = mutable.Map[BigInt, Int]() cells.foreach { (offset, cell) => if ((lastOffset + lastAccess > offset) || lastOffset == offset) { // includes this cell @@ -561,19 +556,19 @@ class DSG(val proc: Procedure, resultNode.getCell(cell2.offset) } - def adjust(cell: DSC, internalOffset: BigInt): DSC = { + def adjust(cell: Cell, internalOffset: BigInt): Cell = { val (term, linkOffset) = solver.findWithOffset(cell.node.get.term) val node = term.node node.addCell(cell.offset + internalOffset + linkOffset, 0) } - def adjust(slice: Slice, offset: BigInt = 0): DSC = { + def adjust(slice: Slice, offset: BigInt = 0): Cell = { val cell = slice.cell val internal = slice.internalOffset adjust(cell, internal + offset) } - def deadjust(cell: DSC): Slice = { + def deadjust(cell: Cell): Slice = { val node = cell.node.get val offset = cell.offset selfCollapse(node) @@ -597,21 +592,21 @@ class DSG(val proc: Procedure, case pos @ Assign(variable, value, _) => value.variables.foreach { v => if (isFormal(pos, v)) { - val node = DSN(Some(this)) + val node = Node(Some(this)) node.flags.incomplete = true nodes.add(node) formals.update(v, Slice(node.cells(0), 0)) } } - val node = DSN(Some(this)) + val node = Node(Some(this)) varToCell(pos) = mutable.Map(variable -> Slice(node.cells(0), 0)) case pos @ DirectCall(target, _) if target.name == "malloc" => - val node = DSN(Some(this)) + val node = Node(Some(this)) varToCell(pos) = mutable.Map(mallocRegister -> Slice(node.cells(0), 0)) case pos @ DirectCall(target, _) if writesTo.contains(target) => val result = mutable.Map[Variable, Slice]() writesTo(target).foreach { variable => - val node = DSN(Some(this)) + val node = Node(Some(this)) result(variable) = Slice(node.cells(0), 0) } varToCell(pos) = result @@ -619,7 +614,7 @@ class DSG(val proc: Procedure, unwrapPaddingAndSlicing(expr) match { case value: Variable => if (isFormal(pos, value)) { - val node = DSN(Some(this)) + val node = Node(Some(this)) node.flags.incomplete = true nodes.add(node) formals.update(value, Slice(node.cells(0), 0)) @@ -631,11 +626,11 @@ class DSG(val proc: Procedure, varToCell } - def cloneSelf(): DSG = { - val newGraph = DSG(proc, constProp, varToSym, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params) + def cloneSelf(): Graph = { + val newGraph = Graph(proc, constProp, varToSym, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params) assert(formals.size == newGraph.formals.size) - val nodes = mutable.Set[DSN]() - val idToNode: mutable.Map[Int, DSN] = mutable.Map() + val nodes = mutable.Set[Node]() + val idToNode: mutable.Map[Int, Node] = mutable.Map() formals.foreach { (variable, slice) => // assert(newGraph.formals.contains(variable)) val node = find(slice).node @@ -684,7 +679,7 @@ class DSG(val proc: Procedure, newGraph.globalMapping.update(range, Field(idToNode(field.node.id), field.offset + offset)) } - val queue = mutable.Queue[DSN]() + val queue = mutable.Queue[Node]() queue.addAll(nodes) while (queue.nonEmpty) { val node = queue.dequeue() @@ -726,243 +721,3 @@ class DSG(val proc: Procedure, newGraph } } - -class Flags() { - var collapsed = false - var stack = false - var heap = false - var global = false - var unknown = false - var read = false - var modified = false - var incomplete = false - var foreign = false - - def 
join(other: Flags): Unit = - collapsed = collapsed || other.collapsed - stack = other.stack || stack - heap = other.heap || heap - global = other.global || global - unknown = other.unknown || unknown - read = other.read || read - modified = other.modified || modified - incomplete = other.incomplete || incomplete - foreign = other.foreign && foreign -} - -/** - * a Data structure Node - */ -class DSN(val graph: Option[DSG], var size: BigInt = 0, val id: Int = NodeCounter.getCounter) { - - val term: DSAUniTerm = DSAUniTerm(this) - val children: mutable.Map[DSN, BigInt] = mutable.Map() -// var collapsed = false - var flags: Flags = Flags() - def collapsed: Boolean = flags.collapsed - - val allocationRegions: mutable.Set[MemoryLocation] = mutable.Set() - - val cells: mutable.Map[BigInt, DSC] = mutable.Map() - this.addCell(0, 0) - - private def updateSize(newSize: BigInt): Unit = { - if newSize > size then - size = newSize - } - - def getCell(offset: BigInt): DSC = { - if (collapsed) { - cells(0) - } else if (!cells.contains(offset)) { - var result: Option[DSC] = None - cells.foreach { (start, cell) => - if (start <= offset && offset < (start + cell.largestAccessedSize)) { - result = Some(cell) - } - } - result match { - case Some(value) => value - case None => ??? - // Logger.warn(s"$this didn't have a cell at offset: $offset. An empty cell was added in") - // addCell(offset, 0) - } - } else { - cells(offset) - } - } - - - def addCell(offset: BigInt, size: Int): DSC = { - this.updateSize(offset + size) - if collapsed then - cells(0) - else if !cells.contains(offset) then - val cell = DSC(Some(this), offset) - cells.update(offset, cell) - cell.growSize(size) - cell - else - cells(offset).growSize(size) - cells(offset) - } - - def cloneSelf(graph: DSG): DSN = { - val node = DSN(Some(graph), this.size) - node.allocationRegions.addAll(this.allocationRegions) - node.flags.join(this.flags) - cells.foreach { (offset, cell) => - node.addCell(offset, cell.largestAccessedSize) - } - node - } - - def cloneNode(from: DSG, to: DSG): Unit = { -// assert(from.nodes.contains(this)) TODO update nodes after each phase for to check this assertion - if (!to.nodes.contains(this)) { - to.nodes.add(this) - - from.varToCell.foreach { (pos, varMap) => - varMap.foreach { (variable, slice) => - if (from.find(slice).node.equals(this)) { - if (to.varToCell.contains(pos)) { - to.varToCell(pos)(variable) = from.find(slice) - } else { - to.varToCell(pos) = mutable.Map(variable -> from.find(slice)) - } - } - } - } - from.formals.foreach { (variable, slice) => - if (from.find(slice).node.equals(this)) { - if (to.varToCell.contains(from.proc)) { - to.varToCell(from.proc)(variable) = from.find(slice) - } else { - to.varToCell(from.proc) = mutable.Map(variable -> from.find(slice)) - } - } - } - cells.values.foreach { cell => - if (cell.pointee.isDefined) { - val pointee = cell.getPointee - pointee.node.cloneNode(from, to) - // to.pointTo.update(cell, pointee) TODO check this is not necessary - } - } - } - } - - override def equals(obj: Any): Boolean = { - obj match - case node: DSN => - this.id == node.id - case _ => false - } - - override def hashCode(): Int = id - - override def toString: String = s"Node($id, $allocationRegions ${if collapsed then ", collapsed" else ""})" - -} - -/** - * a cell in DSA - * @param node the node this cell belongs to - * @param offset the offset of the cell - */ -class DSC(val node: Option[DSN], val offset: BigInt) { - var largestAccessedSize: Int = 0 - - // the cell's pointee - var pointee: 
Option[Slice] = None - - // returns the cell's pointee if it has one. - // if not it will create a placeholder, set it as the pointee of this cell and return it - def getPointee: Slice = - if pointee.isEmpty then - val node = DSN(Some(this.node.get.graph.get)) - pointee = Some(Slice(node.cells(0), 0)) - else - val graph = pointee.get.node.graph.get - val resolvedPointee = graph.find(graph.adjust(pointee.get)) - - pointee = Some(graph.deadjust(resolvedPointee)) - pointee.get - - def growSize(size: Int): Boolean = - if size > largestAccessedSize then - largestAccessedSize = size - true - else false - - override def equals(obj: Any): Boolean = - obj match - case cell: DSC => this.node.equals(cell.node) && this.offset.equals(cell.offset) - case _ => false - - override def toString: String = s"Cell(${if node.isDefined then node.get.toString else "NONE"}, $offset)" -} - - -/** - * a slice made from a cell and an internal offset - */ -case class Slice(cell: DSC, internalOffset: BigInt) { - def node: DSN = cell.node.get - def offset: BigInt = cell.offset -} - -/** - * represents a direct call in DSA - * @param call instance of the call - * @param graph caller's DSG - */ -class CallSite(val call: DirectCall, val graph: DSG) { - val proc: Procedure = call.target - val paramCells: mutable.Map[Variable, Slice] = graph.params(proc).foldLeft(mutable.Map[Variable, Slice]()) { - (m, reg) => - val node = DSN(Some(graph)) - node.flags.incomplete = true - m += (reg -> Slice(node.cells(0), 0)) - } - val returnCells: mutable.Map[Variable, Slice] = graph.writesTo(proc).foldLeft(mutable.Map[Variable, Slice]()) { - (m, reg) => - val node = DSN(Some(graph)) - node.flags.incomplete = true - m += (reg -> Slice(node.cells(0), 0)) - } -} - -case class DSAGlobal(addressRange: AddressRange, field: Field) { - lazy val start: BigInt = addressRange.start - lazy val end: BigInt = addressRange.end - lazy val node: DSN = field.node - lazy val offset: BigInt = field.offset -} - -// global address range -case class AddressRange(start: BigInt, end: BigInt) - -// a node, offset pair, difference to a cell is that it doesn't represent a DSG construct, -case class Field(node: DSN, offset: BigInt) - -// unwraps internal padding and slicing and returns the expression -def unwrapPaddingAndSlicing(expr: Expr): Expr = - expr match - case literal: Literal => literal - case Repeat(repeats, body) => Repeat(repeats, unwrapPaddingAndSlicing(body)) - case SignExtend(extension, body) => SignExtend(extension, unwrapPaddingAndSlicing(body)) - case UnaryExpr(op, arg) => UnaryExpr(op, arg) - case BinaryExpr(op, arg1, arg2) => BinaryExpr(op, unwrapPaddingAndSlicing(arg1), unwrapPaddingAndSlicing(arg2)) - case MemoryLoad(mem, index, endian, size) => MemoryLoad(mem, unwrapPaddingAndSlicing(index), endian, size) - case variable: Variable => variable - case Extract(_, _, body) /*if start == 0 && end == 32*/ => unwrapPaddingAndSlicing(body) // this may make it unsound - case ZeroExtend(_, body) => unwrapPaddingAndSlicing(body) - case _ => expr - - - - - - - diff --git a/src/main/scala/analysis/LocalDSA.scala b/src/main/scala/analysis/data_structure_analysis/LocalPhase.scala similarity index 90% rename from src/main/scala/analysis/LocalDSA.scala rename to src/main/scala/analysis/data_structure_analysis/LocalPhase.scala index cbb0ed368..390a53d34 100644 --- a/src/main/scala/analysis/LocalDSA.scala +++ b/src/main/scala/analysis/data_structure_analysis/LocalPhase.scala @@ -1,13 +1,14 @@ -package analysis +package analysis.data_structure_analysis import 
analysis.BitVectorEval.{bv2SignedInt, isNegative} -import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, DirectCall, Endian, Expr, Extract, IntraProcIRCursor, MemoryAssign, MemoryLoad, Procedure, Register, Variable, ZeroExtend, computeDomain, toShortString} +import analysis.* +import ir.* import specification.{ExternalFunction, SpecGlobal, SymbolTableEntry} import util.writeToFile -import scala.util.control.Breaks.{break, breakable} import java.math.BigInteger import scala.collection.mutable +import scala.util.control.Breaks.{break, breakable} /** * The local phase of Data Structure Analysis @@ -21,16 +22,15 @@ import scala.collection.mutable * @param writesTo mapping from procedures to registers they change * @param params mapping from procedures to their parameters */ -class LocalDSA( - proc: Procedure, - symResults: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], - constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - globals: Set[SymbolTableEntry], globalOffsets: Map[BigInt, BigInt], - externalFunctions: Set[ExternalFunction], - reachingDefs: Map[CFGPosition, Map[Variable, Set[CFGPosition]]], - writesTo: Map[Procedure, Set[Register]], - params: Map[Procedure, Set[Variable]] - ) extends Analysis[Any] { +class LocalPhase(proc: Procedure, + symResults: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], + constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + globals: Set[SymbolTableEntry], globalOffsets: Map[BigInt, BigInt], + externalFunctions: Set[ExternalFunction], + reachingDefs: Map[CFGPosition, Map[Variable, Set[CFGPosition]]], + writesTo: Map[Procedure, Set[Register]], + params: Map[Procedure, Set[Variable]] + ) extends Analysis[Any] { private val mallocRegister = Register("R0", 64) private val stackPointer = Register("R31", 64) @@ -60,7 +60,7 @@ class LocalDSA( position -> newMap } - private def getStack(offset: BigInt): DSC = { + private def getStack(offset: BigInt): Cell = { var last: BigInt = 0 if graph.stackMapping.contains(offset) then graph.stackMapping(offset).cells(0) @@ -79,12 +79,11 @@ class LocalDSA( graph.stackMapping(last).getCell(diff) } - /** * if an expr is the address of a stack location return its corresponding cell * @param pos IL position where the expression is used */ - private def isStack(expr: Expr, pos: CFGPosition): Option[DSC] = { + private def isStack(expr: Expr, pos: CFGPosition): Option[Cell] = { expr match case BinaryExpr(_, arg1: Variable, arg2) if varToSym.contains(pos) && varToSym(pos).contains(arg1) && varToSym(pos)(arg1).exists(s => s.symbolicBase.isInstanceOf[StackLocation]) => @@ -115,13 +114,13 @@ class LocalDSA( s"malloc_$mallocCount" } - val graph: DSG = DSG(proc, constProp, varToSym, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params) - + val graph: Graph = Graph(proc, constProp, varToSym, globals, globalOffsets, externalFunctions, reachingDefs, writesTo, params) + /** * if an expr is the address of a global location return its corresponding cell * @param pos IL position where the expression is used */ - def isGlobal(expr: Expr, pos: CFGPosition, size: Int = 0): Option[DSC] = { + def isGlobal(expr: Expr, pos: CFGPosition, size: Int = 0): Option[Cell] = { val value = evaluateExpression(expr, constProp(pos)) if value.isDefined then val global = graph.isGlobal(value.get.value) @@ -152,7 +151,7 @@ class LocalDSA( * @param offset offset if [+ offset] is present * @return the cell resulting from the unification */ - private def 
visitPointerArithmeticOperation(position: CFGPosition, lhs: DSC, rhs: Variable, size: Int, pointee: Boolean = false, offset: BigInt = 0, collapse: Boolean = false): DSC = + private def visitPointerArithmeticOperation(position: CFGPosition, lhs: Cell, rhs: Variable, size: Int, pointee: Boolean = false, offset: BigInt = 0, collapse: Boolean = false): Cell = // visit all the defining pointer operation on rhs variable first reachingDefs(position)(rhs).foreach(visit) // get the cells of all the SSA variables in the set @@ -203,7 +202,7 @@ class LocalDSA( /** * handles unsupported pointer arithmetic by collapsing all the nodes invloved */ - private def unsupportedPointerArithmeticOperation(n: CFGPosition, expr: Expr, lhsCell: DSC): DSC = { + private def unsupportedPointerArithmeticOperation(n: CFGPosition, expr: Expr, lhsCell: Cell): Cell = { val cell = expr.variables.foldLeft(lhsCell) { (c, v) => val cells: Set[Slice] = graph.getCells(n, v) @@ -231,7 +230,7 @@ class LocalDSA( val size: BigInt = evaluateExpression(mallocRegister, constProp(n)) match case Some(value) => value.value case None => 0 - val node = DSN(Some(graph), size) + val node = Node(Some(graph), size) node.allocationRegions.add(HeapLocation(nextMallocCount, target, size)) node.flags.heap = true graph.mergeCells(graph.varToCell(n)(mallocRegister).cell, node.cells(0)) @@ -295,7 +294,7 @@ class LocalDSA( // assert(varToSym(n).contains(arg1)) // collapse the result // visitPointerArithmeticOperation(n, lhsCell, arg1, byteSize, true, 0, true) - unsupportedPointerArithmeticOperation(n, index,DSN(Some(graph)).cells(0)) + unsupportedPointerArithmeticOperation(n, index,Node(Some(graph)).cells(0)) case arg: Variable => // assert(varToSym(n).contains(arg)) visitPointerArithmeticOperation(n, lhsCell, arg, byteSize, true) @@ -314,7 +313,7 @@ class LocalDSA( val byteSize = size / 8 val global = isGlobal(index, n, byteSize) val stack = isStack(index, n) - val addressPointee: DSC = + val addressPointee: Cell = if global.isDefined then graph.adjust(graph.find(global.get).getPointee) else if stack.isDefined then @@ -326,15 +325,15 @@ class LocalDSA( case Some(v) => // assert(varToSym(n).contains(arg1)) val offset = v.value - visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg1, byteSize, true, offset) + visitPointerArithmeticOperation(n, Node(Some(graph)).cells(0), arg1, byteSize, true, offset) case None => // assert(varToSym(n).contains(arg1)) // collapse the results // visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg1, byteSize, true, 0, true) - unsupportedPointerArithmeticOperation(n, index, DSN(Some(graph)).cells(0)) + unsupportedPointerArithmeticOperation(n, index, Node(Some(graph)).cells(0)) case arg: Variable => // assert(varToSym(n).contains(arg)) - visitPointerArithmeticOperation(n, DSN(Some(graph)).cells(0), arg, byteSize, true) + visitPointerArithmeticOperation(n, Node(Some(graph)).cells(0), arg, byteSize, true) case _ => ??? 
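A minimal sketch of the reduction the hunk above performs for loads, phrased against the Graph API this patch introduces. The 8-byte load of R0 + 16 into R1 at IL position loadPos, the registers R0/R1 and the already-built graph are illustrative assumptions, not names taken from this diff:

    // cell for the variable being assigned by the load
    val lhsCell = graph.adjust(graph.varToCell(loadPos)(R1))
    // every cell the base register R0 may hold at this position
    graph.getCells(loadPos, R0).foreach { slice =>
      val addressCell = graph.adjust(slice, 16)                       // cell addressed by R0 + 16
      val pointee = graph.adjust(graph.find(addressCell).getPointee)  // object stored at that address
      graph.mergeCells(lhsCell, pointee)                              // the loaded value may alias it
    }

The visitor itself additionally threads the load's byte size through so the accessed cell grows accordingly, routes global and stack addresses through isGlobal/isStack first, and falls back to collapsing via unsupportedPointerArithmeticOperation when the offset cannot be evaluated.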
@@ -349,7 +348,7 @@ class LocalDSA( writeToFile(graph.toDot, "test.dot") } - def analyze(): DSG = { + def analyze(): Graph = { val domain = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString) domain.foreach(visit) diff --git a/src/main/scala/analysis/SymbolicAddressAnalysis.scala b/src/main/scala/analysis/data_structure_analysis/SymbolicAddressAnalysis.scala similarity index 91% rename from src/main/scala/analysis/SymbolicAddressAnalysis.scala rename to src/main/scala/analysis/data_structure_analysis/SymbolicAddressAnalysis.scala index 3e5b2f291..52cb23639 100644 --- a/src/main/scala/analysis/SymbolicAddressAnalysis.scala +++ b/src/main/scala/analysis/data_structure_analysis/SymbolicAddressAnalysis.scala @@ -1,9 +1,9 @@ -package analysis +package analysis.data_structure_analysis import analysis.BitVectorEval.{bv2SignedInt, isNegative} import analysis.solvers.ForwardIDESolver -import ir.IRWalk.procedure -import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, BitVecType, CFGPosition, Command, DirectCall, Expr, Extract, GoTo, IndirectCall, Literal, Memory, MemoryLoad, Procedure, Program, Register, Repeat, Return, SignExtend, UnaryExpr, Variable, ZeroExtend} +import analysis.* +import ir.* import java.math.BigInteger @@ -59,7 +59,7 @@ trait SymbolicAddressFunctions(constProp: Map[CFGPosition, Map[Variable, FlatEle val valuelattice: TwoElementLattice = TwoElementLattice() val edgelattice: EdgeFunctionLattice[TwoElement, TwoElementLattice] = EdgeFunctionLattice(valuelattice) - import edgelattice.{IdEdge, ConstEdge} + import edgelattice.{ConstEdge, IdEdge} def edgesCallToEntry(call: DirectCall, entry: Procedure)(d: DL): Map[DL, EdgeFunction[TwoElement]] = d match @@ -101,7 +101,8 @@ trait SymbolicAddressFunctions(constProp: Map[CFGPosition, Map[Variable, FlatEle case Left(_) => Map(d -> IdEdge()) case Right(_) => val size = bv2SignedInt(v) - Map(d -> IdEdge(), Left(SymbolicAddress(variable, StackLocation(s"Stack_${procedure(n).name}", procedure(n), -size), 0)) -> ConstEdge(TwoElementTop)) + val procedure = IRWalk.procedure(n) + Map(d -> IdEdge(), Left(SymbolicAddress(variable, StackLocation(s"Stack_${procedure.name}", procedure, -size), 0)) -> ConstEdge(TwoElementTop)) else d match case Left(value) if value.accessor == arg1 => @@ -128,7 +129,7 @@ trait SymbolicAddressFunctions(constProp: Map[CFGPosition, Map[Variable, FlatEle d match case Left(value) if value.accessor == variable => Map() case Left(_) => Map(d -> IdEdge()) - case Right(_) => Map(d -> IdEdge(), Left(SymbolicAddress(variable, UnknownLocation(nextunknownCount, procedure(n)), 0)) -> ConstEdge(TwoElementTop)) + case Right(_) => Map(d -> IdEdge(), Left(SymbolicAddress(variable, UnknownLocation(nextunknownCount, IRWalk.procedure(n)), 0)) -> ConstEdge(TwoElementTop)) case _ => d match case Left(value) if value.accessor == variable => Map() @@ -141,7 +142,7 @@ trait SymbolicAddressFunctions(constProp: Map[CFGPosition, Map[Variable, FlatEle val size: BigInt = evaluateExpression(mallocVariable, constProp(n)) match case Some(value) => value.value case None => -1 - Map(d -> IdEdge(), Left(SymbolicAddress(mallocVariable, HeapLocation(nextMallocCount, procedure(n), size), 0)) -> ConstEdge(TwoElementTop)) + Map(d -> IdEdge(), Left(SymbolicAddress(mallocVariable, HeapLocation(nextMallocCount, IRWalk.procedure(n), size), 0)) -> ConstEdge(TwoElementTop)) case DirectCall(target, _) if target.returnBlock.isEmpty => // for when calls are non returning, kills the stack dataflow facts d match case Left(value) => diff --git 
a/src/main/scala/analysis/data_structure_analysis/Utility.scala b/src/main/scala/analysis/data_structure_analysis/Utility.scala new file mode 100644 index 000000000..002c1ec12 --- /dev/null +++ b/src/main/scala/analysis/data_structure_analysis/Utility.scala @@ -0,0 +1,253 @@ +package analysis.data_structure_analysis + +import analysis.solvers.{DSAUniTerm, DSAUnionFindSolver, UnionFindSolver, Var} +import analysis.* +import cfg_visualiser.{DotStruct, DotStructElement, StructArrow, StructDotGraph} +import ir.* +import specification.{ExternalFunction, SpecGlobal, SymbolTableEntry} +import util.Logger + +import scala.collection.mutable +import scala.collection.mutable.ArrayBuffer +import scala.util.control.Breaks.{break, breakable} + +object NodeCounter { + private var counter: Int = 0 + + def getCounter: Int = + counter = counter + 1 + counter +} + +class Flags() { + var collapsed = false + var stack = false + var heap = false + var global = false + var unknown = false + var read = false + var modified = false + var incomplete = false + var foreign = false + + def join(other: Flags): Unit = + collapsed = collapsed || other.collapsed + stack = other.stack || stack + heap = other.heap || heap + global = other.global || global + unknown = other.unknown || unknown + read = other.read || read + modified = other.modified || modified + incomplete = other.incomplete || incomplete + foreign = other.foreign && foreign +} + +/** + * a Data structure Node + */ +class Node(val graph: Option[Graph], var size: BigInt = 0, val id: Int = NodeCounter.getCounter) { + + val term: DSAUniTerm = DSAUniTerm(this) + val children: mutable.Map[Node, BigInt] = mutable.Map() +// var collapsed = false + var flags: Flags = Flags() + def collapsed: Boolean = flags.collapsed + + val allocationRegions: mutable.Set[MemoryLocation] = mutable.Set() + + val cells: mutable.Map[BigInt, Cell] = mutable.Map() + this.addCell(0, 0) + + private def updateSize(newSize: BigInt): Unit = { + if newSize > size then + size = newSize + } + + def getCell(offset: BigInt): Cell = { + if (collapsed) { + cells(0) + } else if (!cells.contains(offset)) { + var result: Option[Cell] = None + cells.foreach { (start, cell) => + if (start <= offset && offset < (start + cell.largestAccessedSize)) { + result = Some(cell) + } + } + result match { + case Some(value) => value + case None => ??? + // Logger.warn(s"$this didn't have a cell at offset: $offset. 
An empty cell was added in") + // addCell(offset, 0) + } + } else { + cells(offset) + } + } + + + def addCell(offset: BigInt, size: Int): Cell = { + this.updateSize(offset + size) + if collapsed then + cells(0) + else if !cells.contains(offset) then + val cell = Cell(Some(this), offset) + cells.update(offset, cell) + cell.growSize(size) + cell + else + cells(offset).growSize(size) + cells(offset) + } + + def cloneSelf(graph: Graph): Node = { + val node = Node(Some(graph), this.size) + node.allocationRegions.addAll(this.allocationRegions) + node.flags.join(this.flags) + cells.foreach { (offset, cell) => + node.addCell(offset, cell.largestAccessedSize) + } + node + } + + def cloneNode(from: Graph, to: Graph): Unit = { +// assert(from.nodes.contains(this)) TODO update nodes after each phase for to check this assertion + if (!to.nodes.contains(this)) { + to.nodes.add(this) + + from.varToCell.foreach { (pos, varMap) => + varMap.foreach { (variable, slice) => + if (from.find(slice).node.equals(this)) { + if (to.varToCell.contains(pos)) { + to.varToCell(pos)(variable) = from.find(slice) + } else { + to.varToCell(pos) = mutable.Map(variable -> from.find(slice)) + } + } + } + } + from.formals.foreach { (variable, slice) => + if (from.find(slice).node.equals(this)) { + if (to.varToCell.contains(from.proc)) { + to.varToCell(from.proc)(variable) = from.find(slice) + } else { + to.varToCell(from.proc) = mutable.Map(variable -> from.find(slice)) + } + } + } + cells.values.foreach { cell => + if (cell.pointee.isDefined) { + val pointee = cell.getPointee + pointee.node.cloneNode(from, to) + // to.pointTo.update(cell, pointee) TODO check this is not necessary + } + } + } + } + + override def equals(obj: Any): Boolean = { + obj match + case node: Node => + this.id == node.id + case _ => false + } + + override def hashCode(): Int = id + + override def toString: String = s"Node($id, $allocationRegions ${if collapsed then ", collapsed" else ""})" + +} + +/** + * a cell in DSA + * @param node the node this cell belongs to + * @param offset the offset of the cell + */ +class Cell(val node: Option[Node], val offset: BigInt) { + var largestAccessedSize: Int = 0 + + // the cell's pointee + var pointee: Option[Slice] = None + + // returns the cell's pointee if it has one. 
+ // if not it will create a placeholder, set it as the pointee of this cell and return it + def getPointee: Slice = + if pointee.isEmpty then + val node = Node(Some(this.node.get.graph.get)) + pointee = Some(Slice(node.cells(0), 0)) + else + val graph = pointee.get.node.graph.get + val resolvedPointee = graph.find(graph.adjust(pointee.get)) + + pointee = Some(graph.deadjust(resolvedPointee)) + pointee.get + + def growSize(size: Int): Boolean = + if size > largestAccessedSize then + largestAccessedSize = size + true + else false + + override def equals(obj: Any): Boolean = + obj match + case cell: Cell => this.node.equals(cell.node) && this.offset.equals(cell.offset) + case _ => false + + override def toString: String = s"Cell(${if node.isDefined then node.get.toString else "NONE"}, $offset)" +} + + +/** + * a slice made from a cell and an internal offset + */ +case class Slice(cell: Cell, internalOffset: BigInt) { + def node: Node = cell.node.get + def offset: BigInt = cell.offset +} + +/** + * represents a direct call in DSA + * @param call instance of the call + * @param graph caller's DSG + */ +class CallSite(val call: DirectCall, val graph: Graph) { + val proc: Procedure = call.target + val paramCells: mutable.Map[Variable, Slice] = graph.params(proc).foldLeft(mutable.Map[Variable, Slice]()) { + (m, reg) => + val node = Node(Some(graph)) + node.flags.incomplete = true + m += (reg -> Slice(node.cells(0), 0)) + } + val returnCells: mutable.Map[Variable, Slice] = graph.writesTo(proc).foldLeft(mutable.Map[Variable, Slice]()) { + (m, reg) => + val node = Node(Some(graph)) + node.flags.incomplete = true + m += (reg -> Slice(node.cells(0), 0)) + } +} + +case class DSAGlobal(addressRange: AddressRange, field: Field) { + lazy val start: BigInt = addressRange.start + lazy val end: BigInt = addressRange.end + lazy val node: Node = field.node + lazy val offset: BigInt = field.offset +} + +// global address range +case class AddressRange(start: BigInt, end: BigInt) + +// a node, offset pair, difference to a cell is that it doesn't represent a DSG construct, +case class Field(node: Node, offset: BigInt) + +// unwraps internal padding and slicing and returns the expression +def unwrapPaddingAndSlicing(expr: Expr): Expr = + expr match + case literal: Literal => literal + case Repeat(repeats, body) => Repeat(repeats, unwrapPaddingAndSlicing(body)) + case SignExtend(extension, body) => SignExtend(extension, unwrapPaddingAndSlicing(body)) + case UnaryExpr(op, arg) => UnaryExpr(op, arg) + case BinaryExpr(op, arg1, arg2) => BinaryExpr(op, unwrapPaddingAndSlicing(arg1), unwrapPaddingAndSlicing(arg2)) + case MemoryLoad(mem, index, endian, size) => MemoryLoad(mem, unwrapPaddingAndSlicing(index), endian, size) + case variable: Variable => variable + case Extract(_, _, body) /*if start == 0 && end == 32*/ => unwrapPaddingAndSlicing(body) // this may make it unsound + case ZeroExtend(_, body) => unwrapPaddingAndSlicing(body) + case _ => expr \ No newline at end of file diff --git a/src/main/scala/analysis/solvers/DSAUnionFindSolver.scala b/src/main/scala/analysis/solvers/DSAUnionFindSolver.scala index 24ed1b259..2a52591c9 100644 --- a/src/main/scala/analysis/solvers/DSAUnionFindSolver.scala +++ b/src/main/scala/analysis/solvers/DSAUnionFindSolver.scala @@ -1,7 +1,6 @@ package analysis.solvers -import analysis.DSN - +import analysis.data_structure_analysis.Node import scala.collection.mutable class DSAUnionFindSolver extends UnionFindSolver[UniTerm] { @@ -55,7 +54,7 @@ sealed trait UniTerm /** A term variable in 
the solver */ -case class DSAUniTerm(node: DSN) extends Var[UniTerm] { +case class DSAUniTerm(node: Node) extends Var[UniTerm] { override def toString: String = s"Term{$node}" } diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 542b58b83..9e747fee4 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -1,11 +1,12 @@ package util -import java.io.{File, PrintWriter, FileInputStream, BufferedWriter, FileWriter, IOException} +import java.io.{BufferedWriter, File, FileInputStream, FileWriter, IOException, PrintWriter} import com.grammatech.gtirb.proto.IR.IR import com.grammatech.gtirb.proto.Module.Module import com.grammatech.gtirb.proto.Section.Section import spray.json.* import gtirb.* + import scala.collection.mutable.ListBuffer import scala.collection.mutable.ArrayBuffer import java.io.{File, PrintWriter} @@ -19,11 +20,13 @@ import boogie.* import specification.* import Parsers.* import Parsers.SemanticsParser.* +import analysis.data_structure_analysis.{DataStructureAnalysis, Graph, SymbolicAddress, SymbolicAddressAnalysis} import org.antlr.v4.runtime.tree.ParseTreeWalker import org.antlr.v4.runtime.BailErrorStrategy import org.antlr.v4.runtime.{CharStreams, CommonTokenStream, Token} import translating.* import util.Logger + import java.util.Base64 import spray.json.DefaultJsonProtocol.* import util.intrusive_list.IntrusiveList @@ -51,21 +54,21 @@ case class IRContext( /** Stores the results of the static analyses. */ case class StaticAnalysisContext( - constPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - IRconstPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - memoryRegionResult: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]], - vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]], - interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]], - paramResults: Map[Procedure, Set[Variable]], - steensgaardResults: Map[RegisterVariableWrapper, Set[RegisterVariableWrapper | MemoryRegion]], - mmmResults: MemoryModelMap, - memoryRegionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], - varDepsSummaries: Map[Procedure, Map[Taintable, Set[Taintable]]], - SymbolicAddressess: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], - locals: Option[Map[Procedure, DSG]], - bus: Option[Map[Procedure, DSG]], - tds: Option[Map[Procedure, DSG]], + constPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + IRconstPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + memoryRegionResult: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]], + vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]], + interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]], + paramResults: Map[Procedure, Set[Variable]], + steensgaardResults: Map[RegisterVariableWrapper, Set[RegisterVariableWrapper | MemoryRegion]], + mmmResults: MemoryModelMap, + memoryRegionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], + reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], + varDepsSummaries: Map[Procedure, Map[Taintable, Set[Taintable]]], + SymbolicAddressess: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], + locals: Option[Map[Procedure, Graph]], + bus: Option[Map[Procedure, Graph]], + tds: Option[Map[Procedure, Graph]], ) /** Results 
of the main program execution. @@ -588,16 +591,16 @@ object RunUtils { Logger.debug("[!] Running DSA Analysis") val symbolTableEntries: Set[SymbolTableEntry] = ctx.globals ++ ctx.funcEntries - val dsa = DSA(ctx.program, symResults, analysisResult.last.IRconstPropResult, symbolTableEntries, ctx.globalOffsets, ctx.externalFunctions, reachingDefs, writesTo, analysisResult.last.paramResults) + val dsa = DataStructureAnalysis(ctx.program, symResults, analysisResult.last.IRconstPropResult, symbolTableEntries, ctx.globalOffsets, ctx.externalFunctions, reachingDefs, writesTo, analysisResult.last.paramResults) dsa.analyze() assert(invariant.singleCallBlockEnd(ctx.program)) Logger.debug(s"[!] Finished indirect call resolution after $iteration iterations") analysisResult.last.copy( SymbolicAddressess = symResults, - locals = Some(dsa.locals.toMap), - bus = Some(dsa.bu.toMap), - tds = Some(dsa.td.toMap) + locals = Some(dsa.local.toMap), + bus = Some(dsa.bottomUp.toMap), + tds = Some(dsa.topDown.toMap) ) } } diff --git a/src/test/scala/DSATest.scala b/src/test/scala/DataStructureAnalysisTest.scala similarity index 97% rename from src/test/scala/DSATest.scala rename to src/test/scala/DataStructureAnalysisTest.scala index 6d6a590fc..5d3551248 100644 --- a/src/test/scala/DSATest.scala +++ b/src/test/scala/DataStructureAnalysisTest.scala @@ -1,5 +1,5 @@ -import analysis.{AddressRange, DSC, DSG, DSN, DataLocation, HeapLocation} -import ir.{Assign, BVADD, BinaryExpr, BitVecLiteral, CFGPosition, DirectCall, Endian, Memory, MemoryAssign, MemoryLoad, Program, Register, SharedMemory, cilvisitor, transforms} +import analysis.data_structure_analysis.* +import ir.* import org.scalatest.funsuite.AnyFunSuite import ir.dsl.* import specification.Specification @@ -17,7 +17,7 @@ import util.{BASILConfig, BASILResult, BoogieGeneratorConfig, ILLoadingConfig, I * BASILRESULT.analysis.get.td is the set of graphs from the end of the top-down phase * */ -class DSATest extends AnyFunSuite { +class DataStructureAnalysisTest extends AnyFunSuite { def runAnalysis(program: Program): StaticAnalysisContext = { cilvisitor.visit_prog(transforms.ReplaceReturns(), program) @@ -106,7 +106,7 @@ class DSATest extends AnyFunSuite { */ // this function asserts universal properties about global objects in Jumptable2 example - def assertJumptable2Globals(dsg: DSG): Unit = { + def assertJumptable2Globals(dsg: Graph): Unit = { // global mappings // jump_table relocation @@ -262,7 +262,7 @@ class DSATest extends AnyFunSuite { val results = runAnalysis(program) - val dsg: DSG = results.locals.get(program.mainProcedure) + val dsg: Graph = results.locals.get(program.mainProcedure) // R6 and R7 address the same cell (overlapping cells in the same node that are merged) assert(dsg.find(dsg.varToCell(locAssign1)(R6)).cell.equals(dsg.find(dsg.varToCell(locAssign2)(R7)).cell)) @@ -300,7 +300,7 @@ class DSATest extends AnyFunSuite { ) val results = runAnalysis(program) - val dsg: DSG = results.locals.get(program.mainProcedure) + val dsg: Graph = results.locals.get(program.mainProcedure) // check that R5 points to separate cell at offset 13 assert(dsg.find(dsg.varToCell(locAssign3)(R5)).offset == 13) } @@ -327,7 +327,7 @@ class DSATest extends AnyFunSuite { ) val results = runAnalysis(program) - val dsg: DSG = results.locals.get(program.mainProcedure) + val dsg: Graph = results.locals.get(program.mainProcedure) assert(dsg.find(dsg.formals(R1)).equals(dsg.find(dsg.formals(R2)))) 
assert(dsg.find(dsg.varToCell(locAssign1)(R6)).cell.equals(dsg.find(dsg.varToCell(locAssign2)(R7)).cell)) assert(dsg.find(dsg.varToCell(locAssign1)(R6)).cell.equals(dsg.find(dsg.varToCell(locAssign3)(R5)).cell)) @@ -361,7 +361,7 @@ class DSATest extends AnyFunSuite { val results = runAnalysis(program) - val dsg: DSG = results.locals.get(program.mainProcedure) + val dsg: Graph = results.locals.get(program.mainProcedure) assert(dsg.find(dsg.varToCell(locAssign2)(R7)).equals(dsg.find(dsg.varToCell(locAssign3)(R5)))) } From 01b6c75211134129b587efe9c634fe7fb3bb6aa3 Mon Sep 17 00:00:00 2001 From: l-kent Date: Mon, 14 Oct 2024 12:11:06 +1000 Subject: [PATCH 073/104] remove unnecessary use of options in StaticAnalysisContext for DSA results --- src/main/scala/util/RunUtils.scala | 42 +++++++++---------- .../scala/DataStructureAnalysisTest.scala | 36 ++++++++-------- 2 files changed, 39 insertions(+), 39 deletions(-) diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 9e747fee4..f6d50d80d 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -54,21 +54,21 @@ case class IRContext( /** Stores the results of the static analyses. */ case class StaticAnalysisContext( - constPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - IRconstPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - memoryRegionResult: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]], - vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]], - interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]], - paramResults: Map[Procedure, Set[Variable]], - steensgaardResults: Map[RegisterVariableWrapper, Set[RegisterVariableWrapper | MemoryRegion]], - mmmResults: MemoryModelMap, - memoryRegionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], - varDepsSummaries: Map[Procedure, Map[Taintable, Set[Taintable]]], - SymbolicAddressess: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], - locals: Option[Map[Procedure, Graph]], - bus: Option[Map[Procedure, Graph]], - tds: Option[Map[Procedure, Graph]], + constPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + IRconstPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + memoryRegionResult: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]], + vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]], + interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]], + paramResults: Map[Procedure, Set[Variable]], + steensgaardResults: Map[RegisterVariableWrapper, Set[RegisterVariableWrapper | MemoryRegion]], + mmmResults: MemoryModelMap, + memoryRegionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], + reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], + varDepsSummaries: Map[Procedure, Map[Taintable, Set[Taintable]]], + SymbolicAddressess: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], + localDSA: Map[Procedure, Graph], + bottomUpDSA: Map[Procedure, Graph], + topDownDSA: Map[Procedure, Graph] ) /** Results of the main program execution. 
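// Illustrative usage, mirroring the test updates later in this patch: with the Option
// wrappers removed, callers index the DSA result maps directly, and an empty map simply
// means DSA was not run, e.g.
//   val dsg = results.analysis.get.localDSA(program.mainProcedure)
// replaces the previous results.analysis.get.locals.get(program.mainProcedure).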
@@ -436,9 +436,9 @@ object StaticAnalysis { SymbolicAddressess = Map.empty, reachingDefs = reachingDefinitionsAnalysisResults, varDepsSummaries = varDepsSummaries, - locals = None, - bus = None, - tds = None, + localDSA = Map.empty, + bottomUpDSA = Map.empty, + topDownDSA = Map.empty, ) } @@ -598,9 +598,9 @@ object RunUtils { Logger.debug(s"[!] Finished indirect call resolution after $iteration iterations") analysisResult.last.copy( SymbolicAddressess = symResults, - locals = Some(dsa.local.toMap), - bus = Some(dsa.bottomUp.toMap), - tds = Some(dsa.topDown.toMap) + localDSA = dsa.local.toMap, + bottomUpDSA = dsa.bottomUp.toMap, + topDownDSA = dsa.topDown.toMap ) } } diff --git a/src/test/scala/DataStructureAnalysisTest.scala b/src/test/scala/DataStructureAnalysisTest.scala index 5d3551248..bdedf8cfe 100644 --- a/src/test/scala/DataStructureAnalysisTest.scala +++ b/src/test/scala/DataStructureAnalysisTest.scala @@ -65,7 +65,7 @@ class DataStructureAnalysisTest extends AnyFunSuite { val program = results.ir.program // the dsg of the main procedure after the local phase - val dsg = results.analysis.get.locals.get(program.mainProcedure) + val dsg = results.analysis.get.localDSA(program.mainProcedure) // dsg.formals(R29) is the slice representing formal R29 val R29formal = dsg.adjust(dsg.formals(R29)) @@ -132,7 +132,7 @@ class DataStructureAnalysisTest extends AnyFunSuite { val procs = program.nameToProcedure callees.foreach { callee => - val dsg = results.analysis.get.locals.get(procs(callee)) + val dsg = results.analysis.get.localDSA(procs(callee)) assert(dsg.stackMapping.isEmpty) // stack is not used in either callee assertJumptable2Globals(dsg) // globals should be the same everywhere unused in callees // x should point to a collapsed object, in all 3 functions @@ -147,7 +147,7 @@ class DataStructureAnalysisTest extends AnyFunSuite { val results = runTest("src/test/indirect_calls/jumptable2/gcc_pic/jumptable2") val program = results.ir.program - val dsg = results.analysis.get.locals.get(program.mainProcedure) + val dsg = results.analysis.get.localDSA(program.mainProcedure) val stack0 = dsg.find(dsg.stackMapping(0).cells(0)) val stack8 = dsg.find(dsg.stackMapping(8).cells(0)) val stack16 = dsg.find(dsg.stackMapping(16).cells(0)) @@ -168,7 +168,7 @@ class DataStructureAnalysisTest extends AnyFunSuite { val results = runTest("src/test/dsa/unsafe_pointer_arithmetic/unsafe_pointer_arithmetic") val program = results.ir.program - val dsg = results.analysis.get.locals.get(program.mainProcedure) + val dsg = results.analysis.get.localDSA(program.mainProcedure) // stackX is the pointee of stack object at position X instead of the stack object itself val stack0 = dsg.adjust(dsg.find(dsg.stackMapping(0).cells(0)).getPointee) @@ -208,7 +208,7 @@ class DataStructureAnalysisTest extends AnyFunSuite { test("interproc pointer arithmetic main") { val results = runTest("src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic") val program = results.ir.program - val dsg = results.analysis.get.locals.get(program.mainProcedure) + val dsg = results.analysis.get.localDSA(program.mainProcedure) val stack0 = dsg.adjust(dsg.find(dsg.stackMapping(0).cells(0)).getPointee) val stack8 = dsg.adjust(dsg.find(dsg.stackMapping(8).cells(0)).getPointee) val stack24 = dsg.adjust(dsg.find(dsg.stackMapping(24).cells(0)).getPointee) @@ -231,7 +231,7 @@ class DataStructureAnalysisTest extends AnyFunSuite { test("interproc pointer arithmetic callee") { val results = 
runTest("src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic") val program = results.ir.program - val dsg = results.analysis.get.locals.get(program.nameToProcedure("callee")) + val dsg = results.analysis.get.localDSA(program.nameToProcedure("callee")) val stack8 = dsg.adjust(dsg.find(dsg.stackMapping(8).cells(0)).getPointee) val stack24 = dsg.adjust(dsg.find(dsg.stackMapping(24).cells(0)).getPointee) @@ -262,7 +262,7 @@ class DataStructureAnalysisTest extends AnyFunSuite { val results = runAnalysis(program) - val dsg: Graph = results.locals.get(program.mainProcedure) + val dsg: Graph = results.localDSA(program.mainProcedure) // R6 and R7 address the same cell (overlapping cells in the same node that are merged) assert(dsg.find(dsg.varToCell(locAssign1)(R6)).cell.equals(dsg.find(dsg.varToCell(locAssign2)(R7)).cell)) @@ -300,7 +300,7 @@ class DataStructureAnalysisTest extends AnyFunSuite { ) val results = runAnalysis(program) - val dsg: Graph = results.locals.get(program.mainProcedure) + val dsg: Graph = results.localDSA(program.mainProcedure) // check that R5 points to separate cell at offset 13 assert(dsg.find(dsg.varToCell(locAssign3)(R5)).offset == 13) } @@ -327,7 +327,7 @@ class DataStructureAnalysisTest extends AnyFunSuite { ) val results = runAnalysis(program) - val dsg: Graph = results.locals.get(program.mainProcedure) + val dsg: Graph = results.localDSA(program.mainProcedure) assert(dsg.find(dsg.formals(R1)).equals(dsg.find(dsg.formals(R2)))) assert(dsg.find(dsg.varToCell(locAssign1)(R6)).cell.equals(dsg.find(dsg.varToCell(locAssign2)(R7)).cell)) assert(dsg.find(dsg.varToCell(locAssign1)(R6)).cell.equals(dsg.find(dsg.varToCell(locAssign3)(R5)).cell)) @@ -361,7 +361,7 @@ class DataStructureAnalysisTest extends AnyFunSuite { val results = runAnalysis(program) - val dsg: Graph = results.locals.get(program.mainProcedure) + val dsg: Graph = results.localDSA(program.mainProcedure) assert(dsg.find(dsg.varToCell(locAssign2)(R7)).equals(dsg.find(dsg.varToCell(locAssign3)(R5)))) } @@ -377,7 +377,7 @@ class DataStructureAnalysisTest extends AnyFunSuite { val callees = Set("sub_seven", "add_two", "add_six") val procs = program.nameToProcedure callees.foreach { callee => - val dsg = results.analysis.get.bus.get(procs(callee)) + val dsg = results.analysis.get.bottomUpDSA(procs(callee)) assert(dsg.stackMapping.isEmpty) // stack is not used in either callee assertJumptable2Globals(dsg) // globals should be the same everywhere unused in callees // x should point to a collapsed object, in all 3 functions @@ -391,7 +391,7 @@ class DataStructureAnalysisTest extends AnyFunSuite { test("bottom up jumptable2 main") { val results = runTest("src/test/indirect_calls/jumptable2/gcc_pic/jumptable2") val program = results.ir.program - val dsg = results.analysis.get.bus.get(program.mainProcedure) + val dsg = results.analysis.get.bottomUpDSA(program.mainProcedure) val framePointer = dsg.find(dsg.stackMapping(0).cells(0)) val stack8 = dsg.find(dsg.stackMapping(8).cells(0)) @@ -413,7 +413,7 @@ class DataStructureAnalysisTest extends AnyFunSuite { // same as interproc pointer arithmetic callee's local graph (no changes should have been made) val results = runTest("src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic") val program = results.ir.program - val dsg = results.analysis.get.bus.get(program.nameToProcedure("callee")) + val dsg = results.analysis.get.bottomUpDSA(program.nameToProcedure("callee")) val stack8 = dsg.adjust(dsg.find(dsg.stackMapping(8).cells(0)).getPointee) 
val stack24 = dsg.adjust(dsg.find(dsg.stackMapping(24).cells(0)).getPointee) @@ -425,7 +425,7 @@ class DataStructureAnalysisTest extends AnyFunSuite { test("bottom up interproc pointer arithmetic main") { val results = runTest("src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic") val program = results.ir.program - val dsg = results.analysis.get.bus.get(program.mainProcedure) + val dsg = results.analysis.get.bottomUpDSA(program.mainProcedure) val stack0 = dsg.adjust(dsg.find(dsg.stackMapping(0).cells(0)).getPointee) val stack8 = dsg.adjust(dsg.find(dsg.stackMapping(8).cells(0)).getPointee) @@ -453,7 +453,7 @@ class DataStructureAnalysisTest extends AnyFunSuite { // no changes should be made from previous phase val results = runTest("src/test/indirect_calls/jumptable2/gcc_pic/jumptable2") val program = results.ir.program - val dsg = results.analysis.get.tds.get(program.mainProcedure) + val dsg = results.analysis.get.topDownDSA(program.mainProcedure) // assert(dsg.pointTo.size == 13) // 13 val framePointer = dsg.find(dsg.stackMapping(0).cells(0)) @@ -479,7 +479,7 @@ class DataStructureAnalysisTest extends AnyFunSuite { val callees = Set("sub_seven", "add_two", "add_six") val procs = program.nameToProcedure callees.foreach { callee => - val dsg = results.analysis.get.tds.get(procs(callee)) + val dsg = results.analysis.get.topDownDSA(procs(callee)) assert(dsg.stackMapping.isEmpty) // stack is not used in either callee assertJumptable2Globals(dsg) // globals should be the same everywhere unused in callees // x should point to a collapsed object, in all 3 functions @@ -493,7 +493,7 @@ class DataStructureAnalysisTest extends AnyFunSuite { test("top down interproc pointer arithmetic callee") { val results = runTest("src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic") val program = results.ir.program - val dsg = results.analysis.get.tds.get(program.nameToProcedure("callee")) + val dsg = results.analysis.get.topDownDSA(program.nameToProcedure("callee")) val stack8 = dsg.adjust(dsg.find(dsg.stackMapping(8).cells(0)).getPointee) val stack24 = dsg.adjust(dsg.find(dsg.stackMapping(24).cells(0)).getPointee) @@ -508,7 +508,7 @@ class DataStructureAnalysisTest extends AnyFunSuite { ignore("top down interproc pointer arithmetic main") { val results = runTest("src/test/dsa/interproc_pointer_arithmetic/interproc_pointer_arithmetic") val program = results.ir.program - val dsg = results.analysis.get.tds.get(program.mainProcedure) + val dsg = results.analysis.get.topDownDSA(program.mainProcedure) val stack0 = dsg.adjust(dsg.find(dsg.stackMapping(0).cells(0)).getPointee) val stack8 = dsg.adjust(dsg.find(dsg.stackMapping(8).cells(0)).getPointee) From b4c965fe8c553aa2f8726d2995712769b9493ae0 Mon Sep 17 00:00:00 2001 From: l-kent Date: Mon, 14 Oct 2024 13:42:52 +1000 Subject: [PATCH 074/104] more cleanup --- src/main/scala/util/RunUtils.scala | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index f6d50d80d..4529efb70 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -574,20 +574,17 @@ object RunUtils { Logger.debug("[!] 
Running Writes To") val writesTo = WriteToAnalysis(ctx.program).analyze() val reachingDefs = ReachingDefsAnalysis(ctx.program, writesTo).analyze() - config.analysisDotPath.foreach( - s => - writeToFile(toDot(ctx.program), s"${s}_ct.dot") - ) + config.analysisDotPath.foreach { s => + writeToFile(toDot(ctx.program), s"${s}_ct.dot") + } Logger.debug("[!] Running Symbolic Access Analysis") val symResults: Map[CFGPosition, Map[SymbolicAddress, TwoElement]] = SymbolicAddressAnalysis(ctx.program, analysisResult.last.IRconstPropResult).analyze() - config.analysisDotPath.foreach(s => - writeToFile(toDot(ctx.program, symResults.foldLeft(Map(): Map[CFGPosition, String]) { - (m, t) => - m + (t._1 -> t._2.toString) - }), s"${s}_saa.dot") - ) + config.analysisDotPath.foreach { s => + val labels = symResults.map { (k, v) => k -> v.toString } + writeToFile(toDot(ctx.program, labels), s"${s}_saa.dot") + } Logger.debug("[!] Running DSA Analysis") val symbolTableEntries: Set[SymbolTableEntry] = ctx.globals ++ ctx.funcEntries From 1c82a09499ad0b4648080ae1af218696e03408c4 Mon Sep 17 00:00:00 2001 From: l-kent Date: Mon, 14 Oct 2024 14:13:16 +1000 Subject: [PATCH 075/104] fix spelling --- src/main/scala/util/RunUtils.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 4529efb70..9a156d11f 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -65,7 +65,7 @@ case class StaticAnalysisContext( memoryRegionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], varDepsSummaries: Map[Procedure, Map[Taintable, Set[Taintable]]], - SymbolicAddressess: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], + symbolicAddresses: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], localDSA: Map[Procedure, Graph], bottomUpDSA: Map[Procedure, Graph], topDownDSA: Map[Procedure, Graph] @@ -433,7 +433,7 @@ object StaticAnalysis { steensgaardResults = steensgaardResults, mmmResults = mmm, memoryRegionContents = memoryRegionContents, - SymbolicAddressess = Map.empty, + symbolicAddresses = Map.empty, reachingDefs = reachingDefinitionsAnalysisResults, varDepsSummaries = varDepsSummaries, localDSA = Map.empty, @@ -594,7 +594,7 @@ object RunUtils { assert(invariant.singleCallBlockEnd(ctx.program)) Logger.debug(s"[!] 
Finished indirect call resolution after $iteration iterations") analysisResult.last.copy( - SymbolicAddressess = symResults, + symbolicAddresses = symResults, localDSA = dsa.local.toMap, bottomUpDSA = dsa.bottomUp.toMap, topDownDSA = dsa.topDown.toMap From dfc4b4e9450e48fa1fe91655ed3cd8b71e804e70 Mon Sep 17 00:00:00 2001 From: l-kent Date: Mon, 14 Oct 2024 16:31:16 +1000 Subject: [PATCH 076/104] fix bugs in DSA toDot output caused by various recent fixes --- .../scala/analysis/data_structure_analysis/Graph.scala | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/main/scala/analysis/data_structure_analysis/Graph.scala b/src/main/scala/analysis/data_structure_analysis/Graph.scala index e2e40f1c3..57a4e98a9 100644 --- a/src/main/scala/analysis/data_structure_analysis/Graph.scala +++ b/src/main/scala/analysis/data_structure_analysis/Graph.scala @@ -239,7 +239,7 @@ class Graph(val proc: Procedure, } formals.keys.foreach { variable => - structs.append(DotStruct(s"Formal_$variable", s"Formal_$variable", None)) + structs.append(DotStruct(s"Formal_${variable.name}", s"Formal_${variable.name}", None)) } pointsto.foreach { (cell, pointee) => @@ -254,7 +254,11 @@ class Graph(val proc: Procedure, } varToCell.foreach { (pos, mapping) => - var id = pos.toShortString + var id = pos match { + case p: Procedure => p.name + case b: Block => b.label + case c: Command => c.label.getOrElse("") + } if (id.startsWith("%")) { id = id.drop(1) } From 16a0d1f46801f7adcf6916f84dd4b8e1c78cb55f Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 14 Oct 2024 16:56:55 +1000 Subject: [PATCH 077/104] move dsg.toDot from LocalPhase to runutils --- .../scala/analysis/data_structure_analysis/LocalPhase.scala | 1 - src/main/scala/util/RunUtils.scala | 6 ++++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/src/main/scala/analysis/data_structure_analysis/LocalPhase.scala b/src/main/scala/analysis/data_structure_analysis/LocalPhase.scala index 390a53d34..9d9dac1f7 100644 --- a/src/main/scala/analysis/data_structure_analysis/LocalPhase.scala +++ b/src/main/scala/analysis/data_structure_analysis/LocalPhase.scala @@ -346,7 +346,6 @@ class LocalPhase(proc: Procedure, } case _ => - writeToFile(graph.toDot, "test.dot") } def analyze(): Graph = { val domain = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString) diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 9a156d11f..64df30595 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -591,6 +591,12 @@ object RunUtils { val dsa = DataStructureAnalysis(ctx.program, symResults, analysisResult.last.IRconstPropResult, symbolTableEntries, ctx.globalOffsets, ctx.externalFunctions, reachingDefs, writesTo, analysisResult.last.paramResults) dsa.analyze() + + config.analysisDotPath.foreach { s => + dsa.topDown(ctx.program.mainProcedure).toDot + writeToFile(dsa.topDown(ctx.program.mainProcedure).toDot, s"${s}_main_dsg.dot") + } + assert(invariant.singleCallBlockEnd(ctx.program)) Logger.debug(s"[!] 
Finished indirect call resolution after $iteration iterations") analysisResult.last.copy( From 7a9ee0461b5e65867e41fe7e4e444e57025f0e33 Mon Sep 17 00:00:00 2001 From: Sadra Bayat Tork Date: Mon, 14 Oct 2024 17:17:24 +1000 Subject: [PATCH 078/104] fixed issue with local vars dot struct --- .../analysis/data_structure_analysis/Graph.scala | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/src/main/scala/analysis/data_structure_analysis/Graph.scala b/src/main/scala/analysis/data_structure_analysis/Graph.scala index 57a4e98a9..1829bbeb3 100644 --- a/src/main/scala/analysis/data_structure_analysis/Graph.scala +++ b/src/main/scala/analysis/data_structure_analysis/Graph.scala @@ -239,7 +239,11 @@ class Graph(val proc: Procedure, } formals.keys.foreach { variable => - structs.append(DotStruct(s"Formal_${variable.name}", s"Formal_${variable.name}", None)) + var varName = variable.name + if (varName.startsWith("#")) { + varName = s"LocalVar_${varName.drop(1)}" + } + structs.append(DotStruct(s"Formal_${varName}", s"Formal_${varName}", None)) } pointsto.foreach { (cell, pointee) => @@ -263,9 +267,13 @@ class Graph(val proc: Procedure, id = id.drop(1) } mapping.foreach { (variable, slice) => - structs.append(DotStruct(s"SSA_${id}_${variable.name}", s"SSA_${pos}_${variable.name}", None, false)) + var varName = variable.name + if (varName.startsWith("#")) { + varName = s"LocalVar_${varName.drop(1)}" + } + structs.append(DotStruct(s"SSA_${id}_${varName}", s"SSA_${pos}_${varName}", None, false)) val value = find(slice) - arrows.append(StructArrow(DotStructElement(s"SSA_${id}_${variable.name}", None), DotStructElement(value.node.id.toString, Some(value.cell.offset.toString)), value.internalOffset.toString)) + arrows.append(StructArrow(DotStructElement(s"SSA_${id}_${varName}", None), DotStructElement(value.node.id.toString, Some(value.cell.offset.toString)), value.internalOffset.toString)) } } From 98d8b92ddc072c9f5cccea576d8a4d2f63a81962 Mon Sep 17 00:00:00 2001 From: l-kent Date: Tue, 15 Oct 2024 09:18:27 +1000 Subject: [PATCH 079/104] handle regions with size < 64 bits correctly --- src/main/scala/translating/IRToBoogie.scala | 92 ++++++++++++--------- 1 file changed, 52 insertions(+), 40 deletions(-) diff --git a/src/main/scala/translating/IRToBoogie.scala b/src/main/scala/translating/IRToBoogie.scala index f63c71ad5..26a6ac8f8 100644 --- a/src/main/scala/translating/IRToBoogie.scala +++ b/src/main/scala/translating/IRToBoogie.scala @@ -496,56 +496,68 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti case Some(region) => BMapVar(region.name, MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global) case None => mem } - val aligned: Int = (s.address % 8).toInt - val alignedSizeMultiple = (s.bytes.size - aligned) % 8 - // index of the byte that marks the end of the part that is a multiple of 64-bits - val alignedEnd = s.bytes.size - alignedSizeMultiple - - // Aligned section that is safe to convert to 64-bit values - val alignedSection = for (b <- aligned until alignedEnd by 8) yield { - // Combine the byte constants into a 64-bit value - val combined: BigInt = - (0 until 8).foldLeft(BigInt(0))((x, y) => x + (s.bytes(b + y).value * BigInt(2).pow(y * 8))) - BinaryBExpr( + if (s.bytes.size <= 8) { + // if section is less than 8 bytes, just represent it with one access + val combined = s.bytes.indices.foldLeft(BigInt(0))((x, y) => x + (s.bytes(y).value * BigInt(2).pow(y * 8))) + val bits = s.bytes.size * 8 + Seq(BinaryBExpr( BVEQ, - 
BMemoryLoad(memory, BitVecBLiteral(s.address + b, 64), Endian.LittleEndian, 64), - BitVecBLiteral(combined, 64) - ) - } - - // If memory section is somehow not aligned (is this possible?) then don't convert the initial non-aligned - // section to 64-bit operations, just the rest - val unalignedStartSection = if (aligned == 0) { - Seq() + BMemoryLoad(memory, BitVecBLiteral(s.address, 64), Endian.LittleEndian, bits), + BitVecBLiteral(combined, bits) + )) } else { - for (b <- 0 until aligned) yield { + val aligned: Int = (s.address % 8).toInt + + val alignedSizeMultiple = (s.bytes.size - aligned) % 8 + // index of the byte that marks the end of the part that is a multiple of 64-bits + val alignedEnd = s.bytes.size - alignedSizeMultiple + + // Aligned section that is safe to convert to 64-bit values + val alignedSection = for (b <- aligned until alignedEnd by 8) yield { + // Combine the byte constants into a 64-bit value + val combined: BigInt = + (0 until 8).foldLeft(BigInt(0))((x, y) => x + (s.bytes(b + y).value * BigInt(2).pow(y * 8))) BinaryBExpr( BVEQ, - BMemoryLoad(memory, BitVecBLiteral(s.address + b, 64), Endian.LittleEndian, 8), - s.bytes(b).toBoogie + BMemoryLoad(memory, BitVecBLiteral(s.address + b, 64), Endian.LittleEndian, 64), + BitVecBLiteral(combined, 64) ) } - } - // If the memory section is not a multiple of 64-bits then don't convert the last section to 64-bits - // This is not ideal but will do for now - // Ideal solution is to match the sizes based on the sizes listed in the symbol table, dividing further - // for values greater than 64-bit as much as possible - // But that requires more work - // Combine the byte constants into a 64-bit value - val unalignedEndSection = if (alignedSizeMultiple == 0) { - Seq() - } else { - for (b <- alignedEnd until s.bytes.size) yield { - BinaryBExpr( - BVEQ, - BMemoryLoad(memory, BitVecBLiteral(s.address + b, 64), Endian.LittleEndian, 8), - s.bytes(b).toBoogie - ) + // If memory section is somehow not aligned (is this possible?) 
then don't convert the initial non-aligned + // section to 64-bit operations, just the rest + val unalignedStartSection = if (aligned == 0) { + Seq() + } else { + for (b <- 0 until aligned) yield { + BinaryBExpr( + BVEQ, + BMemoryLoad(memory, BitVecBLiteral(s.address + b, 64), Endian.LittleEndian, 8), + s.bytes(b).toBoogie + ) + } + } + + // If the memory section is not a multiple of 64-bits then don't convert the last section to 64-bits + // This is not ideal but will do for now + // Ideal solution is to match the sizes based on the sizes listed in the symbol table, dividing further + // for values greater than 64-bit as much as possible + // But that requires more work + // Combine the byte constants into a 64-bit value + val unalignedEndSection = if (alignedSizeMultiple == 0) { + Seq() + } else { + for (b <- alignedEnd until s.bytes.size) yield { + BinaryBExpr( + BVEQ, + BMemoryLoad(memory, BitVecBLiteral(s.address + b, 64), Endian.LittleEndian, 8), + s.bytes(b).toBoogie + ) + } } + unalignedStartSection ++ alignedSection ++ unalignedEndSection } - unalignedStartSection ++ alignedSection ++ unalignedEndSection } sections.toList } From eadcf67b8219ebbb0f5d5edb901f16fb09fed37b Mon Sep 17 00:00:00 2001 From: l-kent Date: Tue, 15 Oct 2024 09:19:08 +1000 Subject: [PATCH 080/104] include memory regions in renaming to avoid boogie keywords --- src/main/scala/ir/Visitor.scala | 12 ++++++++++++ src/main/scala/util/RunUtils.scala | 6 ++++-- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/src/main/scala/ir/Visitor.scala b/src/main/scala/ir/Visitor.scala index 1cc9c1b40..0b88f2a4c 100644 --- a/src/main/scala/ir/Visitor.scala +++ b/src/main/scala/ir/Visitor.scala @@ -358,6 +358,18 @@ class Substituter(variables: Map[Variable, Variable] = Map(), memories: Map[Memo * Useful for avoiding Boogie's reserved keywords. 
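 * For example, assuming `old` (a Boogie keyword) appears in the reserved set, a memory
 * region or local variable named `old` would be renamed to `#old` by the visits below.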
*/ class Renamer(reserved: Set[String]) extends Visitor { + override def visitProgram(node: Program): Program = { + for (section <- node.usedMemory.values) { + section.region match { + case Some(region) if reserved.contains(region.name) => + region.name = s"#${region.name}" + case _ => + } + } + + super.visitProgram(node) + } + override def visitLocalVar(node: LocalVar): LocalVar = { if (reserved.contains(node.name)) { node.copy(name = s"#${node.name}") diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 7e12cc9ea..db33cb6d4 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -204,9 +204,7 @@ object IRTransform { cilvisitor.visit_prog(transforms.ConvertSingleReturn(), ctx.program) val externalRemover = ExternalRemover(externalNamesLibRemoved.toSet) - val renamer = Renamer(boogieReserved) externalRemover.visitProgram(ctx.program) - renamer.visitProgram(ctx.program) ctx } @@ -233,6 +231,10 @@ object IRTransform { val specModifies = ctx.specification.subroutines.map(s => s.name -> s.modifies).toMap ctx.program.setModifies(specModifies) + + val renamer = Renamer(boogieReserved) + renamer.visitProgram(ctx.program) + assert(invariant.singleCallBlockEnd(ctx.program)) } From 733cd0bb2a9eec5a351f66f9be12190a93d75908 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Thu, 17 Oct 2024 11:45:03 +1000 Subject: [PATCH 081/104] Added GRA --- .../scala/analysis/GlobalRegionAnalysis.scala | 177 +++++++++++++ .../InterprocSteensgaardAnalysis.scala | 218 ++++++++-------- src/main/scala/analysis/MemoryModelMap.scala | 64 ++++- .../scala/analysis/MemoryRegionAnalysis.scala | 235 +++++++----------- .../ReachingDefinitionsAnalysis.scala | 30 --- src/main/scala/analysis/UtilMethods.scala | 48 ++-- src/main/scala/analysis/VSA.scala | 187 +++++--------- .../transforms/IndirectCallResolution.scala | 173 ++++++++++++- .../scala/ir/transforms/SplitThreads.scala | 4 +- src/main/scala/util/RunUtils.scala | 51 ++-- 10 files changed, 726 insertions(+), 461 deletions(-) create mode 100644 src/main/scala/analysis/GlobalRegionAnalysis.scala diff --git a/src/main/scala/analysis/GlobalRegionAnalysis.scala b/src/main/scala/analysis/GlobalRegionAnalysis.scala new file mode 100644 index 000000000..a48745055 --- /dev/null +++ b/src/main/scala/analysis/GlobalRegionAnalysis.scala @@ -0,0 +1,177 @@ +package analysis + +import analysis.solvers.SimpleWorklistFixpointSolver +import ir.* + +import scala.collection.mutable + +trait GlobalRegionAnalysis(val program: Program, + val domain: Set[CFGPosition], + val constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + val reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], + val mmm: MemoryModelMap, + val globalOffsets: Map[BigInt, BigInt]) { + + var dataCount: Int = 0 + private def nextDataCount() = { + dataCount += 1 + s"data_$dataCount" + } + + val regionLattice: PowersetLattice[DataRegion] = PowersetLattice() + + val lattice: MapLattice[CFGPosition, Set[DataRegion], PowersetLattice[DataRegion]] = MapLattice(regionLattice) + + val first: Set[CFGPosition] = Set.empty + program.mainProcedure + + private val stackPointer = Register("R31", 64) + private val linkRegister = Register("R30", 64) + private val framePointer = Register("R29", 64) + private val mallocVariable = Register("R0", 64) + + private val dataMap: mutable.HashMap[BigInt, DataRegion] = mutable.HashMap() + + private def dataPoolMaster(offset: BigInt, size: BigInt): Option[DataRegion] = { + 
assert(size >= 0) + if (dataMap.contains(offset)) { + if (dataMap(offset).size < (size.toDouble / 8).ceil.toInt) { + dataMap(offset) = DataRegion(dataMap(offset).regionIdentifier, offset, (size.toDouble / 8).ceil.toInt) + Some(dataMap(offset)) + } else { + Some(dataMap(offset)) + } + } else { + dataMap(offset) = DataRegion(nextDataCount(), offset, (size.toDouble / 8).ceil.toInt) + Some(dataMap(offset)) + } + } + + def getDataMap: mutable.HashMap[BigInt, DataRegion] = dataMap + + def tryCoerceIntoData(exp: Expr, n: Command, subAccess: BigInt): Set[DataRegion] = { + val eval = evaluateExpression(exp, constantProp(n)) + if (eval.isDefined) { + val region = dataPoolMaster(eval.get.value, subAccess) + if (region.isDefined) { + return Set(region.get) + } + } + exp match + case literal: BitVecLiteral => tryCoerceIntoData(literal, n, subAccess) + case Extract(end, start, body) => tryCoerceIntoData(body, n, subAccess) + case Repeat(repeats, body) => tryCoerceIntoData(body, n, subAccess) + case ZeroExtend(extension, body) => tryCoerceIntoData(body, n, subAccess) + case SignExtend(extension, body) => tryCoerceIntoData(body, n, subAccess) + case UnaryExpr(op, arg) => tryCoerceIntoData(arg, n, subAccess) + case BinaryExpr(op, arg1, arg2) => + val evalArg2 = evaluateExpression(arg2, constantProp(n)) + if (evalArg2.isDefined) { + val firstArg = tryCoerceIntoData(arg1, n, subAccess) + var regions = Set.empty[DataRegion] + for (i <- firstArg) { + if (globalOffsets.contains(i.start)) { + val newExpr = BinaryExpr(op, BitVecLiteral(globalOffsets(i.start), evalArg2.get.size), evalArg2.get) + regions = regions ++ tryCoerceIntoData(newExpr, n, subAccess) + } else { + val newExpr = BinaryExpr(op, BitVecLiteral(i.start, evalArg2.get.size), evalArg2.get) + regions = regions ++ tryCoerceIntoData(newExpr, n, subAccess) + } + } + return regions + } + Set.empty + case MemoryLoad(mem, index, endian, size) => ??? + case UninterpretedFunction(name, params, returnType) => Set.empty + case variable: Variable => + val ctx = getUse(variable, n, reachingDefs) + var collage = Set.empty[DataRegion] + for (i <- ctx) { + if (i != n) { + val tryVisit = localTransfer(i, Set.empty) + if (tryVisit.nonEmpty) { + collage = collage ++ tryVisit + } + } + } + collage + case _ => Set.empty + } + + def evalMemLoadToGlobal(index: Expr, size: BigInt, n: Command): Set[DataRegion] = { + val indexValue = evaluateExpression(index, constantProp(n)) + if (indexValue.isDefined) { + val indexValueBigInt = indexValue.get.value + val region = dataPoolMaster(indexValueBigInt, size) + if (region.isDefined) { + return Set(region.get) + } + } + tryCoerceIntoData(index, n, size) + } + + /** + * Check if the data region is defined. 
+ * Finds full and partial matches + * Full matches sizes are altered to match the size of the data region + * Partial matches are not altered + * Otherwise the data region is returned + * + * @param dataRegions Set[DataRegion] + * @param n CFGPosition + * @return Set[DataRegion] + */ + private def checkIfDefined(dataRegions: Set[DataRegion], n: CFGPosition): Set[DataRegion] = { + var returnSet = Set.empty[DataRegion] + for (i <- dataRegions) { + val (f, p) = mmm.findDataObjectWithSize(i.start, i.size) + val accesses = f.union(p) + if (accesses.isEmpty) { + returnSet = returnSet + i + } else { + if (accesses.size == 1) { + dataMap(i.start) = DataRegion(accesses.head.regionIdentifier, i.start, i.size.max(accesses.head.size)) + returnSet = returnSet + dataMap(i.start) + } else if (accesses.size > 1) { + val highestRegion = accesses.maxBy(_.start) + dataMap(i.start) = DataRegion(accesses.head.regionIdentifier, i.start, i.size.max(highestRegion.end - i.start)) + returnSet = returnSet + dataMap(i.start) + } + } + } + returnSet + } + + /** Transfer function for state lattice elements. + */ + def localTransfer(n: CFGPosition, s: Set[DataRegion]): Set[DataRegion] = { + n match { + case cmd: Command => + cmd match { + case memAssign: MemoryAssign => + return checkIfDefined(evalMemLoadToGlobal(memAssign.index, memAssign.size, cmd), n) + case assign: Assign => + val unwrapped = unwrapExpr(assign.rhs) + if (unwrapped.isDefined) { + return checkIfDefined(evalMemLoadToGlobal(unwrapped.get.index, unwrapped.get.size, cmd), n) + } + case _ => + } + case _ => + } + Set.empty + } + + def transfer(n: CFGPosition, s: Set[DataRegion]): Set[DataRegion] = localTransfer(n, s) +} + +class GlobalRegionAnalysisSolver( + program: Program, + domain: Set[CFGPosition], + constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], + mmm: MemoryModelMap, + globalOffsets: Map[BigInt, BigInt] + ) extends GlobalRegionAnalysis(program, domain, constantProp, reachingDefs, mmm, globalOffsets) + with IRIntraproceduralForwardDependencies + with Analysis[Map[CFGPosition, Set[DataRegion]]] + with SimpleWorklistFixpointSolver[CFGPosition, Set[DataRegion], PowersetLattice[DataRegion]] \ No newline at end of file diff --git a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala index c3431a29c..53b0e7b8d 100644 --- a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala +++ b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala @@ -37,7 +37,7 @@ case class RegisterWrapperEqualSets(variable: Variable, assigns: Set[Assign]) { * expression node in the AST. It is implemented using [[analysis.solvers.UnionFindSolver]]. 
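 *
 * As a rough illustration with hypothetical statements (not drawn from any test):
 *   R0 := malloc()  generates  [[R0]] = ↑[[alloc_1]]
 *   R1 := R0        generates  [[R1]] = [[R0]]
 *   *R2 := R1       generates  [[R2]] = ↑a and [[R1]] = a, for a fresh term variable a
 * Solving these constraints with union-find places [[R0]], [[R1]] and the pointee of R2
 * in one equivalence class, so R0, R1 and the cell R2 points to may all point to alloc_1.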
*/ class InterprocSteensgaardAnalysis( - program: Program, + domain: Set[CFGPosition], constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], mmm: MemoryModelMap, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], @@ -56,7 +56,6 @@ class InterprocSteensgaardAnalysis( val stackMap: mutable.Map[Expr, StackRegion] = mutable.Map() private val memoryRegionContents: mutable.Map[MemoryRegion, mutable.Set[BitVecLiteral | MemoryRegion]] = mutable.Map() - private val visited: mutable.Set[CFGPosition] = mutable.Set() def getMemoryRegionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]] = memoryRegionContents.map((k, v) => k -> v.toSet).toMap @@ -243,20 +242,72 @@ class InterprocSteensgaardAnalysis( } } - // def exprToRegion(expr: Expr, cmd: Command): Option[MemoryRegion] = { - // val isGlobal = evaluateExpression(expr, constantProp(cmd)) - // if (isGlobal.isDefined) { - // mmm.findDataObject(isGlobal.get.value) - // } else { - // mmm.getStack((cmd, expr)) - // } - // } + def memLoadToRegion(memLoad: MemoryLoad, cmd: Command): Set[MemoryRegion] = { + if (mmm.getStack(cmd).nonEmpty) { + mmm.getStack(cmd).asInstanceOf[Set[MemoryRegion]] + } else { + val isGlobal = evaluateExpression(memLoad.index, constantProp(cmd)) + if (isGlobal.isDefined) { + val globalRegion = mmm.findDataObject(isGlobal.get.value) + if (globalRegion.isDefined) { + return Set(globalRegion.get) + } + return Set.empty[MemoryRegion] // TODO: IT SHOULD THROW AN EXCEPTION + //throw Exception(s"Could not find region for MemLoad: $memLoad, Command: $cmd, Eval: $isGlobal, Global: $globalRegion") + } + memLoad.index match // treats case where the index is a region and is loaded again like in jumptable2/clang_pic + case variable: Variable => + val ctx = getUse(variable, cmd, reachingDefs) + for (i <- ctx) { + i.rhs match { + case load: MemoryLoad => + return memLoadToRegion(load, i) + case _ => + } + } + case _ => + + //throw Exception(s"Could not find region for MemLoad: $memLoad, Command: $cmd, Eval: $isGlobal") + Set.empty[MemoryRegion] + } + } + +// def checkValidBase(expr: Expr, cmd: Command): Option[MemoryRegion] = { +// val evaluation = evaluateExpression(expr, constantProp(cmd)) +// if (evaluation.isDefined) { +// val isGlobal = mmm.isDataBase(evaluation.get.value) +// if (isGlobal.isEmpty) { +// val isStack = mmm.isStackBase(Long.MaxValue - evaluation.get.value) +// if (isStack.isDefined) { +// return isStack +// } +// } else { +// return isGlobal +// } +// } +// None +// } + + def nodeToRegion(n: CFGPosition): Set[MemoryRegion] = { + var returnRegions = Set.empty[MemoryRegion] + n match { + case directCall: DirectCall => + returnRegions = returnRegions + mmm.getHeap(directCall).asInstanceOf[MemoryRegion] + case _ => + returnRegions = returnRegions ++ mmm.getStack(n).asInstanceOf[Set[MemoryRegion]] ++ mmm.getData(n).asInstanceOf[Set[MemoryRegion]] + } + returnRegions + } + + def canCoerceIntoDataRegion(bitVecLiteral: BitVecLiteral): Option[DataRegion] = { + mmm.isDataBase(bitVecLiteral.value) + } /** @inheritdoc */ def analyze(): Unit = // generate the constraints by traversing the AST and solve them on-the-fly - program.procedures.foreach(p => { + domain.foreach(p => { visit(p, ()) }) @@ -267,103 +318,53 @@ class InterprocSteensgaardAnalysis( * unused for this visitor */ def visit(node: CFGPosition, arg: Unit): Unit = { - if (visited.contains(node)) { - return - } node match { case cmd: Command => cmd match { - case directCall: DirectCall => + case directCall: 
DirectCall if directCall.target.name == "malloc" => // X = alloc P: [[X]] = ↑[[alloc-i]] - if (directCall.target.name == "malloc") { - val alloc = mmm.getHeap(directCall) - val defs = getDefinition(mallocVariable, cmd, reachingDefs, false) - unify(IdentifierVariable(RegisterVariableWrapper(mallocVariable, defs)), PointerRef(AllocVariable(alloc))) - } + val alloc = nodeToRegion(cmd).head + val defs = getDefinition(mallocVariable, cmd, reachingDefs) + unify(IdentifierVariable(RegisterWrapperEqualSets(mallocVariable, defs)), PointerRef(AllocVariable(alloc))) case assign: Assign => - assign.rhs match { - case binOp: BinaryExpr => - // X1 = &X2: [[X1]] = ↑[[X2]] - exprToRegion(binOp, cmd).foreach( - x => unify(IdentifierVariable(RegisterVariableWrapper(assign.lhs, getDefinition(assign.lhs, cmd, reachingDefs))), PointerRef(AllocVariable(x))) - ) - case variable: Variable => - // X1 = X2: [[X1]] = [[X2]] - val X1 = assign.lhs - val X2 = variable - unify(IdentifierVariable(RegisterVariableWrapper(X1, getDefinition(X1, cmd, reachingDefs))), IdentifierVariable(RegisterVariableWrapper(X2, getUse(X2, cmd, reachingDefs)))) - // TODO: should lookout for global base + offset case as well - case _ => - unwrapExpr(assign.rhs).foreach { - case memoryLoad: MemoryLoad => - // X1 = *X2: [[X2]] = ↑a ^ [[X1]] = a where a is a fresh term variable - val X1 = assign.lhs - val X2_star = exprToRegion(memoryLoad.index, cmd) - val alpha = FreshVariable() - X2_star.foreach( - x => unify(ExpressionVariable(x), PointerRef(alpha)) - ) - unify(alpha, IdentifierVariable(RegisterVariableWrapper(X1, getDefinition(X1, cmd, reachingDefs)))) - - Logger.debug("Memory load: " + memoryLoad) - Logger.debug("Index: " + memoryLoad.index) - Logger.debug("X2_star: " + X2_star) - Logger.debug("X1: " + X1) - Logger.debug("Assign: " + assign) - - // TODO: This might not be correct for globals - // X1 = &X: [[X1]] = ↑[[X2]] (but for globals) - val $X2 = exprToRegion(memoryLoad.index, cmd) - $X2.foreach( - x => unify(IdentifierVariable(RegisterVariableWrapper(assign.lhs, getDefinition(assign.lhs, cmd, reachingDefs))), PointerRef(AllocVariable(x))) - ) - case _ => // do nothing - } - } + val unwrapped = unwrapExprToVar(assign.rhs) + if (unwrapped.isDefined) { + // X1 = X2: [[X1]] = [[X2]] + val X1 = assign.lhs + val X2 = unwrapped.get + unify(IdentifierVariable(RegisterWrapperEqualSets(X1, getDefinition(X1, cmd, reachingDefs))), IdentifierVariable(RegisterWrapperEqualSets(X2, getUse(X2, cmd, reachingDefs)))) + } else { + // X1 = *X2: [[X2]] = ↑a ^ [[X1]] = a where a is a fresh term variable + val X1 = assign.lhs + val X2_star = nodeToRegion(node) + val alpha = FreshVariable() + X2_star.foreach( + x => + unify(AllocVariable(x), PointerRef(alpha)) + ) + unify(IdentifierVariable(RegisterWrapperEqualSets(X1, getDefinition(X1, cmd, reachingDefs))), alpha) + } case memoryAssign: MemoryAssign => // *X1 = X2: [[X1]] = ↑a ^ [[X2]] = a where a is a fresh term variable - val X1_star1 = exprToRegion(memoryAssign.index, cmd) - val X1_star = X1_star1.foldLeft(Set[MemoryRegion]()) { - case (acc, x) => - if (!memoryRegionContents.contains(x)) { - memoryRegionContents.addOne(x -> mutable.Set()) - } - val found = memoryRegionContents(x).filter(r => r.isInstanceOf[MemoryRegion]).map(r => r.asInstanceOf[MemoryRegion]) - if (found.nonEmpty) { - // get just the memory regions from the region contents - acc ++ found - } else { - acc + x - } - } - val X2 = evaluateExpression(memoryAssign.value, constantProp(cmd)) + val X1_star = nodeToRegion(node) // TODO: This is 
risky as it tries to coerce every value to a region (needed for functionpointer example) - val possibleRegions = exprToRegion(memoryAssign.value, cmd) - - Logger.debug("I am at stmt: " + cmd.label) - Logger.debug("Memory assign: " + memoryAssign) - Logger.debug("X2 is: " + X2) - Logger.debug("PossibleRegions instead of X2 " + possibleRegions) - Logger.debug("Evaluated: " + memoryAssign.value) - Logger.debug("Region " + X1_star) - Logger.debug("Index " + memoryAssign.index) + val X2 = exprToRegion(memoryAssign.value, cmd) + val alpha = FreshVariable() X1_star.foreach(x => - unify(ExpressionVariable(x), PointerRef(alpha)) - if (!memoryRegionContents.contains(x)) { - memoryRegionContents.addOne(x -> mutable.Set()) - } - if X2.isDefined then memoryRegionContents(x).add(X2.get) - memoryRegionContents(x).addAll(possibleRegions.filter(r => r != x)) + unify(AllocVariable(x), PointerRef(alpha)) ) - if X2.isDefined then unify(alpha, ExpressionVariable(X2.get)) - possibleRegions.foreach(x => unify(alpha, ExpressionVariable(x))) + X2.foreach(x => unify(AllocVariable(x), alpha)) + //val X2 = unwrapExprToVar(memoryAssign.value) +// if (X2.isDefined) { +// unify(IdentifierVariable(RegisterWrapperEqualSets(X2.get, getDefinition(X2.get, cmd, reachingDefs))), alpha) +// } else { +// throw Exception(s"Could not find variable for memoryAssign: $memoryAssign, Command: $cmd") +// } case _ => // do nothing TODO: Maybe LocalVar too? } - case _ => + case _ => } - visited.add(node) - InterProcIRCursor.succ(node).foreach(n => visit(n, ())) } private def unify(t1: Term[StTerm], t2: Term[StTerm]): Unit = { @@ -374,20 +375,24 @@ class InterprocSteensgaardAnalysis( /** @inheritdoc */ - def pointsTo(): Map[RegisterVariableWrapper, Set[RegisterVariableWrapper | MemoryRegion]] = { + def pointsTo(): Map[RegisterWrapperEqualSets | MemoryRegion, Set[RegisterWrapperEqualSets | MemoryRegion]] = { val solution = solver.solution() val unifications = solver.unifications() Logger.debug(s"Solution: \n${solution.mkString(",\n")}\n") Logger.debug(s"Sets: \n${unifications.values.map { s => s"{ ${s.mkString(",")} }"}.mkString(", ")}") - val vars = solution.keys.collect { case id: IdentifierVariable => id } - val emptyMap = Map[RegisterVariableWrapper, Set[RegisterVariableWrapper | MemoryRegion]]() - val pointsto = vars.foldLeft(emptyMap) { (a, v: IdentifierVariable) => - val pt: Set[RegisterVariableWrapper | MemoryRegion] = unifications(solution(v)).collect { + val vars = solution.keys + val emptyMap = Map[RegisterWrapperEqualSets | MemoryRegion, Set[RegisterWrapperEqualSets | MemoryRegion]]() + val pointsto = vars.foldLeft(emptyMap) { (a, v: Var[StTerm]) => + val pt: Set[RegisterWrapperEqualSets | MemoryRegion] = unifications(solution(v)).collect { case PointerRef(IdentifierVariable(id)) => id case PointerRef(AllocVariable(alloc)) => alloc + case AllocVariable(alloc) => alloc }.toSet - a + (v.id -> pt) + v match + case AllocVariable(alloc) => a + (alloc -> pt) + case IdentifierVariable(id) => a + (id -> pt) + case _ => a } Logger.debug(s"\nPoints-to:\n${pointsto.map(p => s"${p._1} -> { ${p._2.mkString(",")} }").mkString("\n")}\n") pointsto @@ -395,9 +400,9 @@ class InterprocSteensgaardAnalysis( /** @inheritdoc */ - def mayAlias(): (RegisterVariableWrapper, RegisterVariableWrapper) => Boolean = { + def mayAlias(): (RegisterWrapperEqualSets, RegisterWrapperEqualSets) => Boolean = { val solution = solver.solution() - (id1: RegisterVariableWrapper, id2: RegisterVariableWrapper) => + (id1: RegisterWrapperEqualSets, id2: 
RegisterWrapperEqualSets) => val sol1 = solution(IdentifierVariable(id1)) val sol2 = solution(IdentifierVariable(id2)) sol1 == sol2 && sol1.isInstanceOf[PointerRef] // same equivalence class, and it contains a reference @@ -417,18 +422,11 @@ case class AllocVariable(alloc: MemoryRegion) extends StTerm with Var[StTerm] { /** A term variable that represents an identifier in the program. */ -case class IdentifierVariable(id: RegisterVariableWrapper) extends StTerm with Var[StTerm] { +case class IdentifierVariable(id: RegisterWrapperEqualSets) extends StTerm with Var[StTerm] { override def toString: String = s"$id" } -/** A term variable that represents an expression in the program. - */ -case class ExpressionVariable(expr: MemoryRegion | Expr) extends StTerm with Var[StTerm] { - - override def toString: String = s"$expr" -} - /** A fresh term variable. */ case class FreshVariable(var id: Int = 0) extends StTerm with Var[StTerm] { diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index b4cd9401b..7902a754f 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -31,9 +31,10 @@ class MemoryModelMap { private val bufferedSharedStackMap: mutable.Map[String, mutable.Map[Procedure, mutable.TreeMap[RangeKey, StackRegion]]] = mutable.Map() private val heapMap: mutable.Map[RangeKey, HeapRegion] = mutable.TreeMap() private val dataMap: mutable.Map[RangeKey, DataRegion] = mutable.TreeMap() + private val cfgPositionToDataRegion: mutable.Map[CFGPosition, Set[DataRegion]] = mutable.Map() private val heapCalls: mutable.Map[DirectCall, HeapRegion] = mutable.Map() - private val stackAllocationSites: mutable.Map[(CFGPosition, Expr), StackRegion] = mutable.Map() + private val stackAllocationSites: mutable.Map[CFGPosition, Set[StackRegion]] = mutable.Map() private val uf = new UnionFind() @@ -52,7 +53,6 @@ class MemoryModelMap { case StackRegion(regionIdentifier, start, parent) => if (r.subAccesses.nonEmpty) { val max = start + r.subAccesses.max - r.fields ++= r.subAccesses.diff(Set(max)).map(_ + start) max } else { ??? 
@@ -77,12 +77,8 @@ class MemoryModelMap { val currentMaxRegion = currentStackMap(currentMaxRange) if (offset <= currentMaxRange.end) { currentStackMap.remove(currentMaxRange) - currentMaxRegion.fields += offset val updatedRange = RangeKey(currentMaxRange.start, (maxSize(region) - 1).max(currentMaxRange.end)) currentStackMap.addOne(updatedRange -> currentMaxRegion) - for (elem <- region.fields) { - currentMaxRegion.fields += offset + elem - } } else { currentStackMap(RangeKey(offset, maxSize(region) - 1)) = s } @@ -153,7 +149,23 @@ class MemoryModelMap { } } - def convertMemoryRegions(stackRegionsPerProcedure: mutable.Map[Procedure, mutable.Set[StackRegion]], heapRegions: mutable.Map[DirectCall, HeapRegion], mergeRegions: mutable.Set[Set[MemoryRegion]], allocationSites: mutable.Map[(CFGPosition, Expr), StackRegion], procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]]): Unit = { + def convertMemoryRegions(stackRegionsPerProcedure: mutable.Map[Procedure, mutable.Set[StackRegion]], heapRegions: mutable.Map[DirectCall, HeapRegion], mergeRegions: mutable.Set[Set[MemoryRegion]], allocationSites: Map[CFGPosition, Set[StackRegion]], procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]], graRegions: mutable.HashMap[BigInt, DataRegion], graResults: Map[CFGPosition, Set[DataRegion]]): Unit = { + //val keepData = dataMap.filterNot((range, region) => graRegions.contains(region.start)).map((range, region) => region) +// val oldRegions = dataMap.values.toSet +// dataMap.clear() +// for (dr <- graRegions.map((_, dataRegion) => dataRegion)) { +// add(dr.start, dr) +// } +// for (dr <- oldRegions) { +// val obj = findDataObject(dr.start) +// if (obj.isEmpty) { +// Logger.debug(s"Data region $dr not found in the new data map") +// } else { +// obj.get.relfContent.add(dr.regionIdentifier) +// } +// } + + cfgPositionToDataRegion ++= graResults stackAllocationSites ++= allocationSites stackRegionsPerProcedure.keys.foreach(exitNode => if (procedureToSharedRegions.contains(exitNode)) { @@ -359,12 +371,34 @@ class MemoryModelMap { def findStackObject(value: BigInt): Option[StackRegion] = stackMap.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => returnRegion(obj)) + def isStackBase(value: BigInt): Option[StackRegion] = { + val found = stackMap.find((range, _) => range.start == value) + if (found.isDefined) then Some(returnRegion(found.get._2)) else None + } + + def isDataBase(value: BigInt): Option[DataRegion] = { + val found = dataMap.find((range, _) => range.start == value) + if (found.isDefined) then Some(returnRegion(found.get._2)) else None + } + def findSharedStackObject(value: BigInt): Set[StackRegion] = sharedStackMap.values.flatMap(_.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => returnRegion(obj))).toSet - def findDataObject(value: BigInt): Option[DataRegion] = + def findDataObject(value: BigInt): Option[DataRegion] = dataMap.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => returnRegion(obj)) + def findDataObjectWithSize(value: BigInt, size: BigInt): (Set[DataRegion], Set[DataRegion]) = + // get regions that are between value and value + size and put partial regions (if part of the regions is between value and value + size) in a separate set + dataMap.foldLeft((Set.empty[DataRegion], Set.empty[DataRegion])) { case ((fullRegions, partialRegions), (range, region)) => + if (range.start >= value && range.end <= value + size - 1) { + (fullRegions + 
returnRegion(region), partialRegions) + } else if ((range.start < value && range.end >= value) || (range.start <= value + size - 1 && range.end > value + size - 1)) { + (fullRegions, partialRegions + returnRegion(region)) + } else { + (fullRegions, partialRegions) + } + } + override def toString: String = s"Stack: $stackMap\n Heap: $heapMap\n Data: $dataMap\n" @@ -456,16 +490,18 @@ class MemoryModelMap { heapCalls(directCall) } - def getStack(allocationSite: (CFGPosition, Expr)): Option[StackRegion] = { - val stackRegion = stackAllocationSites.get(allocationSite) - if stackRegion.isDefined then Some(returnRegion(stackAllocationSites(allocationSite))) else None + def getStack(allocationSite: CFGPosition): Set[StackRegion] = { + stackAllocationSites.getOrElse(allocationSite, Set.empty).map(returnRegion) + } + + def getData(cfgPosition: CFGPosition): Set[DataRegion] = { + cfgPositionToDataRegion.getOrElse(cfgPosition, Set.empty).map(returnRegion) } } trait MemoryRegion { val regionIdentifier: String val subAccesses: mutable.Set[BigInt] = mutable.Set() - val fields: mutable.Set[BigInt] = mutable.Set() } case class StackRegion(override val regionIdentifier: String, start: BigInt, parent: Procedure) extends MemoryRegion { @@ -477,7 +513,9 @@ case class HeapRegion(override val regionIdentifier: String, size: BigInt, paren } case class DataRegion(override val regionIdentifier: String, start: BigInt, size: BigInt) extends MemoryRegion { - override def toString: String = s"Data($regionIdentifier, $start)" + override def toString: String = s"Data($regionIdentifier, $start, $size, ($relfContent))" + def end: BigInt = start + size - 1 + val relfContent: mutable.Set[String] = mutable.Set[String]() } class UnionFind { diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index 80e38ef4e..25170ef38 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -1,7 +1,7 @@ package analysis import analysis.BitVectorEval.isNegative -import analysis.solvers.WorklistFixpointSolverWithReachability +import analysis.solvers.SimpleWorklistFixpointSolver import ir.* import util.Logger @@ -9,6 +9,7 @@ import scala.collection.mutable import scala.collection.mutable.ListBuffer trait MemoryRegionAnalysis(val program: Program, + val domain: Set[CFGPosition], val globals: Map[BigInt, String], val globalOffsets: Map[BigInt, BigInt], val subroutines: Map[BigInt, String], @@ -16,8 +17,7 @@ trait MemoryRegionAnalysis(val program: Program, val ANRResult: Map[CFGPosition, Set[Variable]], val RNAResult: Map[CFGPosition, Set[Variable]], val reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], - val offSetApproximation: Boolean = false, - val mmm: MemoryModelMap) { + val graResult: Map[CFGPosition, Set[DataRegion]]) { var mallocCount: Int = 0 private var stackCount: Int = 0 @@ -42,6 +42,7 @@ trait MemoryRegionAnalysis(val program: Program, * @return the stack region corresponding to the offset */ private def poolMaster(expr: BigInt, stackBase: Procedure, subAccess: BigInt): StackRegion = { + assert(subAccess >= 0) val stackPool = stackMap.getOrElseUpdate(stackBase, mutable.HashMap()) var region: StackRegion = null if (stackPool.contains(expr)) { @@ -74,18 +75,11 @@ trait MemoryRegionAnalysis(val program: Program, } } - val regionLattice: PowersetLattice[MemoryRegion] = PowersetLattice() + val regionLattice: PowersetLattice[StackRegion] = PowersetLattice() - /** - * 
Lifted memory region lattice, with new bottom element representing "unreachable". - */ - val liftedLattice: LiftLattice[Set[MemoryRegion], PowersetLattice[MemoryRegion]] = LiftLattice(regionLattice) - - val lattice: MapLattice[CFGPosition, LiftedElement[Set[MemoryRegion]], LiftLattice[Set[MemoryRegion], PowersetLattice[MemoryRegion]]] = MapLattice(liftedLattice) - - val domain: Set[CFGPosition] = Set.empty ++ program + val lattice: MapLattice[CFGPosition, Set[StackRegion], PowersetLattice[StackRegion]] = MapLattice(regionLattice) - val first: Set[CFGPosition] = Set.empty ++ program.procedures + val first: Set[CFGPosition] = Set.empty + program.mainProcedure private val stackPointer = Register("R31", 64) private val linkRegister = Register("R30", 64) @@ -100,7 +94,6 @@ trait MemoryRegionAnalysis(val program: Program, var procedureToHeapRegions: mutable.Map[DirectCall, HeapRegion] = mutable.Map() var memLoadToRegion: mutable.Map[MemoryLoad, MemoryRegion] = mutable.Map() var mergeRegions: mutable.Set[Set[MemoryRegion]] = mutable.Set() - var allocationSites: mutable.Map[(CFGPosition, Expr), StackRegion] = mutable.Map() def addMergableRegions(regions: Set[MemoryRegion]): Unit = { mergeRegions.add(regions) @@ -114,35 +107,22 @@ trait MemoryRegionAnalysis(val program: Program, procedureToHeapRegions.put(directCall, returnRegion) } - def addMemLoadRegion(memoryLoad: MemoryLoad, memoryRegion: MemoryRegion): Unit = { + def addMemLoadRegion(memoryLoad: MemoryLoad, memoryRegion: StackRegion): Unit = { memLoadToRegion.put(memoryLoad, memoryRegion) } - def addAllocationSite(memory: (CFGPosition, Expr), stackRegion: StackRegion): Unit = { - allocationSites.put(memory, stackRegion) - } - - def reducibleToRegion(binExpr: BinaryExpr, n: Command, subAccess: BigInt): Set[MemoryRegion] = { - var reducedRegions = Set.empty[MemoryRegion] + def reducibleToRegion(binExpr: BinaryExpr, n: Command, subAccess: BigInt): Set[StackRegion] = { + var reducedRegions = Set.empty[StackRegion] binExpr.arg1 match { case variable: Variable if !spList.contains(variable) => val ctx = getUse(variable, n, reachingDefs) for (i <- ctx) { - val regions = i.rhs match { - case memoryLoad: MemoryLoad => - eval(memoryLoad.index, Set.empty, i, memoryLoad.size) - case _: BitVecLiteral => - Set.empty - case _ => - eval(i.rhs, Set.empty, i, -1) // TODO: is the subAccess correct here? 
- } + val regions = eval(i.rhs, Set.empty, i, subAccess) evaluateExpression(binExpr.arg2, constantProp(n)) match { case Some(b: BitVecLiteral) => - regions.foreach { - case stackRegion: StackRegion => + regions.foreach { stackRegion => val nextOffset = bitVectorOpToBigIntOp(binExpr.op, stackRegion.start, b.value) reducedRegions = reducedRegions + poolMaster(nextOffset, IRWalk.procedure(n), subAccess) - case _ => } case None => } @@ -153,21 +133,21 @@ trait MemoryRegionAnalysis(val program: Program, reducedRegions } - def reducibleVariable(variable: Variable, n: Command, subAccess: BigInt): Set[MemoryRegion] = { - var regions = Set.empty[MemoryRegion] + def reducibleVariable(variable: Variable, n: Command, subAccess: BigInt): Set[StackRegion] = { + var regions = Set.empty[StackRegion] val ctx = getDefinition(variable, n, reachingDefs) for (i <- ctx) { - i.rhs match { - case binaryExpr: BinaryExpr => - regions = regions ++ reducibleToRegion(binaryExpr, i, subAccess) - case _ => - //regions = regions ++ eval(i.rhs, Set.empty, i) + if (i != n) { // TODO: nicer way to deal with loops (a variable is being incremented in a loop) + regions = regions ++ eval(i.rhs, Set.empty, i, subAccess) } } regions } - def eval(exp: Expr, env: Set[MemoryRegion], n: Command, subAccess: BigInt): Set[MemoryRegion] = { + def eval(exp: Expr, env: Set[StackRegion], n: Command, subAccess: BigInt): Set[StackRegion] = { + if (graResult(n).nonEmpty) { + return Set.empty // skip global memory regions + } exp match { case binOp: BinaryExpr => if (spList.contains(binOp.arg1)) { @@ -175,20 +155,20 @@ trait MemoryRegionAnalysis(val program: Program, case Some(b: BitVecLiteral) => val negB = if isNegative(b) then b.value - BigInt(2).pow(b.size) else b.value Set(poolMaster(negB, IRWalk.procedure(n), subAccess)) - case None => env + case None => Set.empty } } else if (reducibleToRegion(binOp, n, subAccess).nonEmpty) { reducibleToRegion(binOp, n, subAccess) } else { - evaluateExpression(binOp, constantProp(n)) match { - case Some(b: BitVecLiteral) => eval(b, env, n, subAccess) - case None => env - } + Set.empty } case variable: Variable => variable match { case reg: Register if spList.contains(reg) => // TODO: this is a hack because spList is not comprehensive it needs to be a standalone analysis - eval(BitVecLiteral(0, 64), env, n, subAccess) + if getDefinition(variable, n, reachingDefs).isEmpty then + Set(poolMaster(Long.MaxValue, IRWalk.procedure(n), subAccess)) + else + reducibleVariable(variable, n, subAccess) case _ => evaluateExpression(variable, constantProp(n)) match { case Some(b: BitVecLiteral) => @@ -201,8 +181,7 @@ trait MemoryRegionAnalysis(val program: Program, eval(memoryLoad.index, env, n, memoryLoad.size) // ignore case where it could be a global region (loaded later in MMM from relf) case b: BitVecLiteral => - val negB = if isNegative(b) then b.value - BigInt(2).pow(b.size) else b.value - Set(poolMaster(negB, IRWalk.procedure(n), subAccess)) + Set.empty // we cannot evaluate this to a concrete value, we need VSA for this case _ => Logger.debug(s"type: ${exp.getClass} $exp\n") @@ -212,7 +191,7 @@ trait MemoryRegionAnalysis(val program: Program, /** Transfer function for state lattice elements. 
*/ - def localTransfer(n: CFGPosition, s: Set[MemoryRegion]): Set[MemoryRegion] = n match { + def localTransfer(n: CFGPosition, s: Set[StackRegion]): Set[StackRegion] = n match { case cmd: Command => cmd match { case directCall: DirectCall => @@ -239,102 +218,89 @@ trait MemoryRegionAnalysis(val program: Program, val negB = if isNegative(b) then b.value - BigInt(2).pow(b.size) else b.value val newHeapRegion = HeapRegion(nextMallocCount(), negB, IRWalk.procedure(n)) addReturnHeap(directCall, newHeapRegion) - regionLattice.lub(s, Set(newHeapRegion)) + s case None => s } } else { s } case memAssign: MemoryAssign => - if (evaluateExpression(memAssign.index, constantProp(n)).isDefined) { - return s // skip global memory regions - } val result = eval(memAssign.index, s, cmd, memAssign.size) - if (result.size > 1) { - //throw new Exception(s"Memory load resulted in multiple regions ${result} for mem load $memoryLoad") - addMergableRegions(result) - } - if (result.nonEmpty) { - addAllocationSite((cmd, memAssign.index), result.head.asInstanceOf[StackRegion]) - } - regionLattice.lub(s, result) +// if (result.size > 1) { +// //throw new Exception(s"Memory load resulted in multiple regions ${result} for mem load $memoryLoad") +// addMergableRegions(result) +// } + result case assign: Assign => stackDetection(assign) - var m = s - unwrapExpr(assign.rhs).foreach { - case memoryLoad: MemoryLoad => - if (evaluateExpression(memoryLoad.index, constantProp(n)).isEmpty) { // skip global memory regions - val result = eval(memoryLoad.index, s, cmd, memoryLoad.size) - if (result.size > 1) { - //throw new Exception(s"Memory load resulted in multiple regions ${result} for mem load $memoryLoad") - addMergableRegions(result) - } - if (result.nonEmpty) { - addAllocationSite((cmd, memoryLoad.index), result.head.asInstanceOf[StackRegion]) - } - m = regionLattice.lub(m, result) - } - case _ => m - } - m - case _ => s - } - case _ => s // ignore other kinds of nodes - } + var m = Set[StackRegion]() - def localTransfer2(n: CFGPosition, s: Set[MemoryRegion]): Set[MemoryRegion] = n match { - case cmd: Command => - cmd match { - case directCall: DirectCall => - if (directCall.target.name == "malloc") { - evaluateExpression(mallocVariable, constantProp(n)) match { - case Some(b: BitVecLiteral) => - val negB = if isNegative(b) then b.value - BigInt(2).pow(b.size) else b.value - val newHeapRegion = HeapRegion(nextMallocCount(), negB, IRWalk.procedure(n)) - addReturnHeap(directCall, newHeapRegion) - regionLattice.lub(s, Set(newHeapRegion)) - case None => s - } - } else { - s - } - case memAssign: MemoryAssign => - val evaluation = evaluateExpression(memAssign.index, constantProp(n)) - if (evaluation.isDefined) { - val isGlobal = mmm.findDataObject(evaluation.get.value) - if (isGlobal.isEmpty) { - val result = poolMaster(Long.MaxValue - evaluation.get.value, IRWalk.procedure(n), memAssign.size) - addAllocationSite((cmd, memAssign.index), result) - return regionLattice.lub(s, Set(result)) - } - } - s - case assign: Assign => - var m = s - unwrapExpr(assign.rhs).foreach { - case memoryLoad: MemoryLoad => - val evaluation = evaluateExpression(memoryLoad.index, constantProp(n)) - if (evaluation.isDefined) { - val isGlobal = mmm.findDataObject(evaluation.get.value) - if (isGlobal.isEmpty) { - val result = poolMaster(Long.MaxValue - evaluation.get.value, IRWalk.procedure(n), memoryLoad.size) - addAllocationSite((cmd, memoryLoad.index), result) - m = regionLattice.lub(s, Set(result)) - } - } - case _ => m - } + val unwrapped = 
unwrapExpr(assign.rhs) + if (unwrapped.isDefined) + val result = eval(unwrapped.get.index, s, cmd, unwrapped.get.size) +// if (result.size > 1) { +// //throw new Exception(s"Memory load resulted in multiple regions ${result} for mem load $memoryLoad") +// addMergableRegions(result) +// } + m = m ++ result m case _ => s } case _ => s // ignore other kinds of nodes } - def transferUnlifted(n: CFGPosition, s: Set[MemoryRegion]): Set[MemoryRegion] = if offSetApproximation then localTransfer2(n, s) else localTransfer(n, s) +// def localTransfer2(n: CFGPosition, s: Set[StackRegion]): Set[StackRegion] = n match { +// case cmd: Command => +// cmd match { +// case directCall: DirectCall => +// if (directCall.target.name == "malloc") { +// evaluateExpression(mallocVariable, constantProp(n)) match { +// case Some(b: BitVecLiteral) => +// val negB = if isNegative(b) then b.value - BigInt(2).pow(b.size) else b.value +// val newHeapRegion = HeapRegion(nextMallocCount(), negB, IRWalk.procedure(n)) +// addReturnHeap(directCall, newHeapRegion) +// s +// case None => s +// } +// } else { +// s +// } +// case memAssign: MemoryAssign => +// val evaluation = evaluateExpression(memAssign.index, constantProp(n)) +// if (evaluation.isDefined) { +// val isGlobal = mmm.findDataObject(evaluation.get.value) +// if (isGlobal.isEmpty) { +// val result = poolMaster(Long.MaxValue - evaluation.get.value, IRWalk.procedure(n), memAssign.size) +// return Set(result) +// } +// } +// s +// case assign: Assign => +// var m = Set[StackRegion]() +// unwrapExpr(assign.rhs).foreach { +// case memoryLoad: MemoryLoad => +// val evaluation = evaluateExpression(memoryLoad.index, constantProp(n)) +// if (evaluation.isDefined) { +// val isGlobal = mmm.findDataObject(evaluation.get.value) +// if (isGlobal.isEmpty) { +// val result = poolMaster(Long.MaxValue - evaluation.get.value, IRWalk.procedure(n), memoryLoad.size) +// m = m + result +// } +// } +// case _ => m +// } +// m +// case _ => s +// } +// case _ => s // ignore other kinds of nodes +// } + + def transfer(n: CFGPosition, s: Set[StackRegion]): Set[StackRegion] = localTransfer(n, s) } class MemoryRegionAnalysisSolver( program: Program, + domain: Set[CFGPosition], globals: Map[BigInt, String], globalOffsets: Map[BigInt, BigInt], subroutines: Map[BigInt, String], @@ -342,19 +308,8 @@ class MemoryRegionAnalysisSolver( ANRResult: Map[CFGPosition, Set[Variable]], RNAResult: Map[CFGPosition, Set[Variable]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], - offSetApproximation: Boolean, - mmm: MemoryModelMap - ) extends MemoryRegionAnalysis(program, globals, globalOffsets, subroutines, constantProp, ANRResult, RNAResult, reachingDefs, offSetApproximation, mmm) + graResult: Map[CFGPosition, Set[DataRegion]] + ) extends MemoryRegionAnalysis(program, domain, globals, globalOffsets, subroutines, constantProp, ANRResult, RNAResult, reachingDefs, graResult) with IRIntraproceduralForwardDependencies - with Analysis[Map[CFGPosition, LiftedElement[Set[MemoryRegion]]]] - with WorklistFixpointSolverWithReachability[CFGPosition, Set[MemoryRegion], PowersetLattice[MemoryRegion]] { - - override def funsub(n: CFGPosition, x: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]]): LiftedElement[Set[MemoryRegion]] = { - n match { - // function entry nodes are always reachable as this is intraprocedural - case _: Procedure => liftedLattice.lift(regionLattice.bottom) - // all other nodes are processed with join+transfer - case _ => super.funsub(n, x) - } - } -} + 
with Analysis[Map[CFGPosition, Set[StackRegion]]] + with SimpleWorklistFixpointSolver[CFGPosition, Set[StackRegion], PowersetLattice[StackRegion]] \ No newline at end of file diff --git a/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala b/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala index 818886d1e..84525b665 100644 --- a/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala +++ b/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala @@ -21,21 +21,6 @@ case class ReachingDefinitionsAnalysis(program: Program, inter: Boolean = false) ) val domain: Set[CFGPosition] = Set.empty ++ program - var uniqueDefCounter: Int = 0 - - def nextDef(): Int = { - uniqueDefCounter += 1 - uniqueDefCounter - } - - /* - * Good enough as stmts are unique - */ - private def generateUniqueDefinition( - variable: Variable - ): Assign = { - Assign(variable, Register("Unique_" + nextDef(), 0)) - } def transfer(n: CFGPosition, s: (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]])): (Map[Variable, Set[Definition]], Map[Variable, Set[Definition]]) = localTransfer(n, s) @@ -77,25 +62,10 @@ case class ReachingDefinitionsAnalysis(program: Program, inter: Boolean = false) transformUses(assume.body.variables, s) case indirectCall: IndirectCall => transformUses(indirectCall.target.variables, s) - // if we do interproc analysis then there is no need to make any special assumptions about malloc - case directCall: DirectCall if directCall.target.name == "malloc" && !inter => - // assume R0 has been assigned, generate a fake definition - val mallocVar = Register("R0", 64) - val mallocDef = generateUniqueDefinition(mallocVar) - val mallocUseDefs: Map[Variable, Set[Definition]] = Set(mallocVar).foldLeft(Map.empty[Variable, Set[Definition]]) { - case (acc, v) => - acc + (v -> s._1(v)) - } - (s._1 + (Register("R0", 64) -> Set(mallocDef)), mallocUseDefs) case _ => s } } -class ReachingDefinitionsAnalysisSolver(program: Program) - extends ReachingDefinitionsAnalysis(program) - with SimpleWorklistFixpointSolver[CFGPosition, (Map[Variable, Set[ReachingDefinitionsAnalysis#Definition]], Map[Variable, Set[ReachingDefinitionsAnalysis#Definition]]), ReachingDefinitionsAnalysis#TupleElement] - with IRIntraproceduralForwardDependencies - class InterprocReachingDefinitionsAnalysisSolver(program: Program) extends ReachingDefinitionsAnalysis(program, true) with SimpleWorklistFixpointSolver[CFGPosition, (Map[Variable, Set[ReachingDefinitionsAnalysis#Definition]], Map[Variable, Set[ReachingDefinitionsAnalysis#Definition]]), ReachingDefinitionsAnalysis#TupleElement] diff --git a/src/main/scala/analysis/UtilMethods.scala b/src/main/scala/analysis/UtilMethods.scala index a993dc767..2abc01843 100644 --- a/src/main/scala/analysis/UtilMethods.scala +++ b/src/main/scala/analysis/UtilMethods.scala @@ -143,33 +143,49 @@ def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapper } } -def getDefinition(variable: Variable, node: CFGPosition, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], noFilter: Boolean = true): Set[Assign] = { +def getDefinition(variable: Variable, node: CFGPosition, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]): Set[Assign] = { val (in, _) = reachingDefs(node) - if noFilter then in.getOrElse(variable, Set()) else in.getOrElse(variable, Set()).filterNot(_.rhs.variables.forall(_.name.contains("Unique"))) + in.getOrElse(variable, Set()) } -def getUse(variable: Variable, node: CFGPosition, 
reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], noFiler: Boolean = true): Set[Assign] = { +def getUse(variable: Variable, node: CFGPosition, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]): Set[Assign] = { val (_, out) = reachingDefs(node) - if noFiler then out.getOrElse(variable, Set()) else out.getOrElse(variable, Set()).filterNot(_.rhs.variables.forall(_.name.contains("Unique"))) + out.getOrElse(variable, Set()) } -def unwrapExpr(expr: Expr): Set[Expr] = { - var buffers: Set[Expr] = Set() +def unwrapExpr(expr: Expr): Option[MemoryLoad] = { expr match { - case e: Extract => buffers ++= unwrapExpr(e.body) - case e: SignExtend => buffers ++= unwrapExpr(e.body) - case e: ZeroExtend => buffers ++= unwrapExpr(e.body) - case repeat: Repeat => buffers ++= unwrapExpr(repeat.body) - case unaryExpr: UnaryExpr => buffers ++= unwrapExpr(unaryExpr.arg) + case e: Extract => unwrapExpr(e.body) + case e: SignExtend => unwrapExpr(e.body) + case e: ZeroExtend => unwrapExpr(e.body) + case repeat: Repeat => unwrapExpr(repeat.body) + case unaryExpr: UnaryExpr => unwrapExpr(unaryExpr.arg) case binaryExpr: BinaryExpr => - buffers ++= unwrapExpr(binaryExpr.arg1) - buffers ++= unwrapExpr(binaryExpr.arg2) + unwrapExpr(binaryExpr.arg1) + unwrapExpr(binaryExpr.arg2) case memoryLoad: MemoryLoad => - buffers += memoryLoad - buffers ++= unwrapExpr(memoryLoad.index) + Some(memoryLoad) case _ => + None + } +} + +def unwrapExprToVar(expr: Expr): Option[Variable] = { + expr match { + case variable: Variable => + Some(variable) + case e: Extract => unwrapExprToVar(e.body) + case e: SignExtend => unwrapExprToVar(e.body) + case e: ZeroExtend => unwrapExprToVar(e.body) + case repeat: Repeat => unwrapExprToVar(repeat.body) + case unaryExpr: UnaryExpr => unwrapExprToVar(unaryExpr.arg) + case binaryExpr: BinaryExpr => + unwrapExprToVar(binaryExpr.arg1) + unwrapExprToVar(binaryExpr.arg2) + case memoryLoad: MemoryLoad => unwrapExprToVar(memoryLoad.index) + case _ => + None } - buffers } def bitVectorOpToBigIntOp(op: BinOp, lhs: BigInt, rhs: BigInt): BigInt = { diff --git a/src/main/scala/analysis/VSA.scala b/src/main/scala/analysis/VSA.scala index eaaf33044..b3b3a4452 100644 --- a/src/main/scala/analysis/VSA.scala +++ b/src/main/scala/analysis/VSA.scala @@ -11,31 +11,21 @@ import util.Logger /** ValueSets are PowerSet of possible values */ trait Value { - val expr: BitVecLiteral -} -trait AddressValue extends Value { - val name: String -} - -case class GlobalAddress(override val expr: BitVecLiteral, override val name: String) extends AddressValue { - override def toString: String = "GlobalAddress(" + expr + ", " + name + ")" } -case class LocalAddress(override val expr: BitVecLiteral, override val name: String) extends AddressValue { - override def toString: String = "LocalAddress(" + expr + ", " + name + ")" +case class AddressValue(region: MemoryRegion) extends Value { + override def toString: String = "Address(" + region + ")" } case class LiteralValue(expr: BitVecLiteral) extends Value { override def toString: String = "Literal(" + expr + ")" } -trait ValueSetAnalysis(program: Program, - globals: Map[BigInt, String], - externalFunctions: Map[BigInt, String], - globalOffsets: Map[BigInt, BigInt], - subroutines: Map[BigInt, String], +trait ValueSetAnalysis(domain: Set[CFGPosition], + program: Program, mmm: MemoryModelMap, - constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]]) { + constantProp: Map[CFGPosition, Map[Variable, 
FlatElement[BitVecLiteral]]], + reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]) { val powersetLattice: PowersetLattice[Value] = PowersetLattice() @@ -45,57 +35,23 @@ trait ValueSetAnalysis(program: Program, val lattice: MapLattice[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]], LiftLattice[Map[Variable | MemoryRegion, Set[Value]], mapLattice.type]] = MapLattice(liftedLattice) - val domain: Set[CFGPosition] = Set.empty ++ program - - val first: Set[CFGPosition] = Set.empty ++ program - - private val stackPointer = Register("R31", 64) - private val linkRegister = Register("R30", 64) - private val framePointer = Register("R29", 64) - - private val ignoreRegions: Set[Expr] = Set(linkRegister, framePointer) + val first: Set[CFGPosition] = Set.empty + program.mainProcedure private val mallocVariable = Register("R0", 64) - private def resolveGlobalOffset(address: BigInt): String = { - val tableAddress = globalOffsets(address) - if (globals.contains(tableAddress)) { - globals(tableAddress) - } else if (subroutines.contains(tableAddress)) { - subroutines(tableAddress) - } else { - //throw Exception("Error: cannot resolve global offset " + address + " -> " + tableAddress) - "@ERROR" - } - } - - def exprToRegion(expr: Expr, n: CFGPosition): Option[MemoryRegion] = { - expr match { - case binOp: BinaryExpr if binOp.arg1 == stackPointer => - evaluateExpression(binOp.arg2, constantProp(n)) match { - case Some(b: BitVecLiteral) => mmm.findStackObject(b.value) - case None => None - } + def nodeToRegion(n: CFGPosition): Set[MemoryRegion] = { + var returnRegions = Set.empty[MemoryRegion] + n match { + case directCall: DirectCall => + returnRegions = returnRegions + mmm.getHeap(directCall).asInstanceOf[MemoryRegion] case _ => - evaluateExpression(expr, constantProp(n)) match { - case Some(b: BitVecLiteral) => mmm.findDataObject(b.value) - case None => None - } + returnRegions = returnRegions ++ mmm.getStack(n).asInstanceOf[Set[MemoryRegion]] ++ mmm.getData(n).asInstanceOf[Set[MemoryRegion]] } + returnRegions } - private def getValueType(bitVecLiteral: BitVecLiteral): Value = { - if (externalFunctions.contains(bitVecLiteral.value)) { - LocalAddress(bitVecLiteral, externalFunctions(bitVecLiteral.value)) - } else if (globals.contains(bitVecLiteral.value)) { - GlobalAddress(bitVecLiteral, globals(bitVecLiteral.value)) - } else if (globalOffsets.contains(bitVecLiteral.value)) { - GlobalAddress(bitVecLiteral, resolveGlobalOffset(bitVecLiteral.value)) - } else if (subroutines.contains(bitVecLiteral.value)) { - GlobalAddress(bitVecLiteral, subroutines(bitVecLiteral.value)) - } else { - LiteralValue(bitVecLiteral) - } + def canCoerceIntoDataRegion(bitVecLiteral: BitVecLiteral): Option[DataRegion] = { + mmm.isDataBase(bitVecLiteral.value) } /** Default implementation of eval. 
@@ -103,62 +59,57 @@ trait ValueSetAnalysis(program: Program, def eval(cmd: Command, s: Map[Variable | MemoryRegion, Set[Value]], n: CFGPosition): Map[Variable | MemoryRegion, Set[Value]] = { var m = s cmd match + case directCall: DirectCall if directCall.target.name == "malloc" => + val regions = nodeToRegion(n) + // malloc variable + m = m + (mallocVariable -> regions.map(r => AddressValue(r))) + m case localAssign: Assign => - localAssign.rhs match - case memoryLoad: MemoryLoad => - exprToRegion(memoryLoad.index, n) match - case Some(r: MemoryRegion) => - // this is an exception to the rule and only applies to data regions - evaluateExpression(memoryLoad.index, constantProp(n)) match - case Some(bitVecLiteral: BitVecLiteral) => - m = m + (r -> Set(getValueType(bitVecLiteral))) - m = m + (localAssign.lhs -> m(r)) - m - case None => - m = m + (localAssign.lhs -> m(r)) - m - case None => - Logger.debug("could not find region for " + localAssign) - m - case e: Expr => - evaluateExpression(e, constantProp(n)) match { - case Some(bv: BitVecLiteral) => - m = m + (localAssign.lhs -> Set(getValueType(bv))) - m - case None => - Logger.debug("could not evaluate expression" + e) - m - } + val regions = nodeToRegion(n) + if (regions.nonEmpty) { + m = m + (localAssign.lhs -> regions.map(r => AddressValue(r))) + } else { + evaluateExpression(localAssign.rhs, constantProp(n)) match + case Some(bitVecLiteral: BitVecLiteral) => + val possibleData = canCoerceIntoDataRegion(bitVecLiteral) + if (possibleData.isDefined) { + m = m + (localAssign.lhs -> Set(AddressValue(possibleData.get))) + } else { + m = m + (localAssign.lhs -> Set(LiteralValue(bitVecLiteral))) + } + case None => + val unwrapValue = unwrapExprToVar(localAssign.rhs) + unwrapValue match { + case Some(v: Variable) => + m = m + (localAssign.lhs -> m(v)) + case None => + Logger.debug(s"Too Complex: $localAssign.rhs") // do nothing + } + } + m case memAssign: MemoryAssign => - memAssign.index match - case binOp: BinaryExpr => - val region: Option[MemoryRegion] = exprToRegion(binOp, n) - region match - case Some(r: MemoryRegion) => - val storeValue = memAssign.value - evaluateExpression(storeValue, constantProp(n)) match - case Some(bitVecLiteral: BitVecLiteral) => - m = m + (r -> Set(getValueType(bitVecLiteral))) - m - /* - // TODO constant prop returned BOT OR TOP. 
Merge regions because RHS could be a memory loaded address - case variable: Variable => - s + (r -> s(variable)) - */ - case None => - storeValue.match { - case v: Variable => - m = m + (r -> m(v)) - m - case _ => - Logger.debug(s"Too Complex: $storeValue") // do nothing - m - } + val regions = nodeToRegion(n) + evaluateExpression(memAssign.value, constantProp(n)) match + case Some(bitVecLiteral: BitVecLiteral) => + regions.foreach { r => + val possibleData = canCoerceIntoDataRegion(bitVecLiteral) + if (possibleData.isDefined) { + m = m + (r -> Set(AddressValue(possibleData.get))) + } else { + m = m + (r -> Set(LiteralValue(bitVecLiteral))) + } + } + case None => + val unwrapValue = unwrapExprToVar(memAssign.value) + unwrapValue match { + case Some(v: Variable) => + regions.foreach { r => + m = m + (r -> m(v)) + } case None => - Logger.debug("could not find region for " + memAssign) - m - case _ => - m + Logger.debug(s"Too Complex: $memAssign.value") // do nothing + } + m case _ => m } @@ -184,15 +135,13 @@ trait ValueSetAnalysis(program: Program, } class ValueSetAnalysisSolver( + domain: Set[CFGPosition], program: Program, - globals: Map[BigInt, String], - externalFunctions: Map[BigInt, String], - globalOffsets: Map[BigInt, BigInt], - subroutines: Map[BigInt, String], mmm: MemoryModelMap, constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], -) extends ValueSetAnalysis(program, globals, externalFunctions, globalOffsets, subroutines, mmm, constantProp) - with IRInterproceduralForwardDependencies + reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])] +) extends ValueSetAnalysis(domain, program, mmm, constantProp, reachingDefs) + with IRIntraproceduralForwardDependencies with Analysis[Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]] with WorklistFixpointSolverWithReachability[CFGPosition, Map[Variable | MemoryRegion, Set[Value]], MapLattice[Variable | MemoryRegion, Set[Value], PowersetLattice[Value]]] { diff --git a/src/main/scala/ir/transforms/IndirectCallResolution.scala b/src/main/scala/ir/transforms/IndirectCallResolution.scala index 2503b00ad..f0f9c338b 100644 --- a/src/main/scala/ir/transforms/IndirectCallResolution.scala +++ b/src/main/scala/ir/transforms/IndirectCallResolution.scala @@ -13,8 +13,7 @@ import scala.collection.mutable import cilvisitor._ def resolveIndirectCallsUsingPointsTo( - pointsTos: Map[RegisterVariableWrapper, Set[RegisterVariableWrapper | MemoryRegion]], - regionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], + pointsTos: Map[RegisterWrapperEqualSets | MemoryRegion, Set[RegisterWrapperEqualSets | MemoryRegion]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], IRProgram: Program ): Boolean = { @@ -38,26 +37,38 @@ def resolveIndirectCallsUsingPointsTo( val result = mutable.Set[String]() region match { case stackRegion: StackRegion => - if (regionContents.contains(stackRegion)) { - for (c <- regionContents(stackRegion)) { + if (pointsTos.contains(stackRegion)) { + for (c <- pointsTos(stackRegion)) { c match { - case bitVecLiteral: BitVecLiteral => Logger.debug("hi: " + bitVecLiteral) //??? + case registerWrapperEqualSets: RegisterWrapperEqualSets => + pointsTos(registerWrapperEqualSets).foreach { + case memoryRegion: MemoryRegion => + result.addAll(searchRegion(memoryRegion)) + case registerWrapperEqualSets: RegisterWrapperEqualSets => throw Exception(s"possibly recursive points-to relation? should I handle this? 
$registerWrapperEqualSets") + } case memoryRegion: MemoryRegion => - result.addAll(searchRegion(memoryRegion)) + //result.addAll(searchRegion(memoryRegion)) + result.add(memoryRegion.regionIdentifier) // TODO: fix me } } } result case dataRegion: DataRegion => - if (!regionContents.contains(dataRegion) || regionContents(dataRegion).isEmpty) { + if (!pointsTos.contains(dataRegion) || pointsTos(dataRegion).isEmpty) { result.add(dataRegion.regionIdentifier) } else { result.add(dataRegion.regionIdentifier) // TODO: may need to investigate if we should add the parent region - for (c <- regionContents(dataRegion)) { + for (c <- pointsTos(dataRegion)) { c match { - case bitVecLiteral: BitVecLiteral => Logger.debug("hi: " + bitVecLiteral) //??? + case registerWrapperEqualSets: RegisterWrapperEqualSets => + pointsTos(registerWrapperEqualSets).foreach { + case memoryRegion: MemoryRegion => + result.addAll(searchRegion(memoryRegion)) + case registerWrapperEqualSets: RegisterWrapperEqualSets => throw Exception(s"possibly recursive points-to relation? should I handle this? $registerWrapperEqualSets") + } case memoryRegion: MemoryRegion => - result.addAll(searchRegion(memoryRegion)) + //result.addAll(searchRegion(memoryRegion)) + result.add(memoryRegion.regionIdentifier) // TODO: fix me } } } @@ -73,11 +84,11 @@ def resolveIndirectCallsUsingPointsTo( def resolveAddresses(variable: Variable, i: IndirectCall): mutable.Set[String] = { val names = mutable.Set[String]() - val variableWrapper = RegisterVariableWrapper(variable, getUse(variable, i, reachingDefs)) + val variableWrapper = RegisterWrapperEqualSets(variable, getUse(variable, i, reachingDefs)) pointsTos.get(variableWrapper) match { case Some(value) => value.map { - case v: RegisterVariableWrapper => names.addAll(resolveAddresses(v.variable, i)) + case v: RegisterWrapperEqualSets => names.addAll(resolveAddresses(v.variable, i)) case m: MemoryRegion => names.addAll(searchRegion(m)) } names @@ -147,3 +158,141 @@ def resolveIndirectCallsUsingPointsTo( modified } + + +def resolveIndirectCallsUsingVSA( + vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]], + IRProgram: Program + ): Boolean = { + var modified: Boolean = false + val worklist = ListBuffer[CFGPosition]() + + worklist.addAll(IRProgram) + + val visited = mutable.Set[CFGPosition]() + while (worklist.nonEmpty) { + val node = worklist.remove(0) + if (!visited.contains(node)) { + // add to worklist before we delete the node and can no longer find its successors + InterProcIRCursor.succ(node).foreach(node => worklist.addOne(node)) + process(node) + visited.add(node) + } + } + + def addFakeProcedure(name: String): Procedure = { + val newProcedure = Procedure(name) + IRProgram.procedures += newProcedure + newProcedure + } + + def searchRegion(memoryRegion: MemoryRegion, n: CFGPosition): mutable.Set[String] = { + val names = mutable.Set[String]() + memoryRegion match { + case stackRegion: StackRegion => + vsaResult.get(n) match + case Some(value) => value match + case Lift(el) => el.get(stackRegion) match + case Some(value) => value.map { + case addressValue: AddressValue => names.addAll(searchRegion(addressValue.region, n)) + case literalValue: LiteralValue => + } + case None => + case LiftedBottom => + case _ => + case None => + case dataRegion: DataRegion => + names.add(dataRegion.regionIdentifier) + vsaResult.get(n) match + case Some(value) => value match + case Lift(el) => el.get(dataRegion) match + case Some(value) => value.map { + case addressValue: AddressValue 
=> names.addAll(searchRegion(addressValue.region, n)) + case literalValue: LiteralValue => + } + case None => + case LiftedBottom => + case _ => + case None => + } + names + } + + def resolveAddresses(variable: Variable, i: IndirectCall): mutable.Set[String] = { + val names = mutable.Set[String]() + vsaResult.get(i) match + case Some(value) => value match + case Lift(el) => el.get(variable) match + case Some(value) => value.map { + case addressValue: AddressValue => names.addAll(searchRegion(addressValue.region, i)) + case literalValue: LiteralValue => + } + case None => + case LiftedBottom => + case _ => + case None => + names + } + + def process(n: CFGPosition): Unit = n match { + case indirectCall: IndirectCall if indirectCall.target != Register("R30", 64) => + if (!indirectCall.hasParent) { + // skip if we have already processesd this call + return + } + // we need the single-call-at-end-of-block invariant + assert(indirectCall.parent.statements.lastOption.contains(indirectCall)) + + val block = indirectCall.parent + val procedure = block.parent + + val targetNames = resolveAddresses(indirectCall.target, indirectCall) + Logger.debug(s"VSA approximated call ${indirectCall.target} with $targetNames") + Logger.debug(IRProgram.procedures) + val targets: mutable.Set[Procedure] = + targetNames.map(name => IRProgram.procedures.find(_.name == name).getOrElse(addFakeProcedure(name))) + + if (targets.nonEmpty) { + Logger.debug(s"Resolved indirect call $indirectCall") + } + + if (targets.size == 1) { + modified = true + + val newCall = DirectCall(targets.head, indirectCall.label) + block.statements.replace(indirectCall, newCall) + } else if (targets.size > 1) { + + val oft = indirectCall.parent.jump + + modified = true + val newBlocks = ArrayBuffer[Block]() + for (t <- targets) { + Logger.debug(targets) + val address = t.address.match { + case Some(a) => a + case None => + throw Exception(s"resolved indirect call $indirectCall to procedure which does not have address: $t") + } + val assume = Assume(BinaryExpr(BVEQ, indirectCall.target, BitVecLiteral(address, 64))) + val newLabel: String = block.label + t.name + val directCall = DirectCall(t) + + /* copy the goto node resulting */ + val fallthrough = oft match { + case g: GoTo => GoTo(g.targets, g.label) + case h: Unreachable => Unreachable() + case r: Return => Return() + } + newBlocks.append(Block(newLabel, None, ArrayBuffer(assume, directCall), fallthrough)) + } + block.statements.remove(indirectCall) + procedure.addBlocks(newBlocks) + val newCall = GoTo(newBlocks, indirectCall.label) + block.replaceJump(newCall) + } + case _ => + } + + modified +} \ No newline at end of file diff --git a/src/main/scala/ir/transforms/SplitThreads.scala b/src/main/scala/ir/transforms/SplitThreads.scala index 8678720e7..1496c5e33 100644 --- a/src/main/scala/ir/transforms/SplitThreads.scala +++ b/src/main/scala/ir/transforms/SplitThreads.scala @@ -20,7 +20,7 @@ import cilvisitor._ // do reachability analysis // also need a bit in the IR where it creates separate files def splitThreads(program: Program, - pointsTo: Map[RegisterVariableWrapper, Set[RegisterVariableWrapper | MemoryRegion]], + pointsTo: Map[RegisterWrapperEqualSets | MemoryRegion, Set[RegisterWrapperEqualSets | MemoryRegion]], regionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])] ): Unit = { @@ -34,7 +34,7 @@ def splitThreads(program: Program, // look up R2 value using points to results val R2 = 
Register("R2", 64) val b = reachingDefs(d) - val R2Wrapper = RegisterVariableWrapper(R2, getDefinition(R2, d, reachingDefs)) + val R2Wrapper = RegisterWrapperEqualSets(R2, getDefinition(R2, d, reachingDefs)) val threadTargets = pointsTo(R2Wrapper) if (threadTargets.size > 1) { diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 4d4a960bc..32fbecd01 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -54,11 +54,11 @@ case class IRContext( case class StaticAnalysisContext( constPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], IRconstPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - memoryRegionResult: Map[CFGPosition, LiftedElement[Set[MemoryRegion]]], + memoryRegionResult: Map[CFGPosition, Set[StackRegion]], vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]], interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]], paramResults: Map[Procedure, Set[Variable]], - steensgaardResults: Map[RegisterVariableWrapper, Set[RegisterVariableWrapper | MemoryRegion]], + steensgaardResults: Map[RegisterWrapperEqualSets | MemoryRegion, Set[RegisterWrapperEqualSets | MemoryRegion]], mmmResults: MemoryModelMap, memoryRegionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], @@ -316,6 +316,7 @@ object StaticAnalysis { val mergedSubroutines = subroutines ++ externalAddresses val domain = computeDomain(IntraProcIRCursor, IRProgram.procedures) + val interDomain = computeDomain(InterProcIRCursor, IRProgram.procedures) Logger.debug("[!] Running ANR") val ANRSolver = ANRAnalysisSolver(IRProgram) @@ -360,17 +361,14 @@ object StaticAnalysis { ) }) - Logger.debug("[!] Running Constant Propagation with SSA") - val constPropSolverWithSSA = ConstantPropagationSolverWithSSA(IRProgram, reachingDefinitionsAnalysisResults) - val constPropResultWithSSA = constPropSolverWithSSA.analyze() - val mmm = MemoryModelMap() mmm.preLoadGlobals(mergedSubroutines, globalOffsets, globalAddresses, globalSizes) + val graSolver = GlobalRegionAnalysisSolver(IRProgram, domain.toSet, constPropResult, reachingDefinitionsAnalysisResults, mmm, globalOffsets) + val graResult = graSolver.analyze() + Logger.debug("[!] Running MRA") - val assumeR31 = false - val constantPropForMRA = ConstantPropagationSolver(IRProgram, assumeR31).analyze() - val mraSolver = MemoryRegionAnalysisSolver(IRProgram, globalAddresses, globalOffsets, mergedSubroutines, constantPropForMRA, ANRResult, RNAResult, reachingDefinitionsAnalysisResults, assumeR31, mmm) + val mraSolver = MemoryRegionAnalysisSolver(IRProgram, domain.toSet, globalAddresses, globalOffsets, mergedSubroutines, constPropResult, ANRResult, RNAResult, reachingDefinitionsAnalysisResults, graResult) val mraResult = mraSolver.analyze() config.analysisDotPath.foreach(s => { @@ -389,14 +387,30 @@ object StaticAnalysis { toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> mraResult(b).toString).toMap), s"${s}_MRA$iteration.dot" ) + + writeToFile( + toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> graResult(b).toString).toMap), + s"${s}_GRA$iteration.dot" + ) }) Logger.debug("[!] 
Running MMM") - mmm.convertMemoryRegions(mraSolver.procedureToStackRegions, mraSolver.procedureToHeapRegions, mraSolver.mergeRegions, mraSolver.allocationSites, mraSolver.procedureToSharedRegions) + mmm.convertMemoryRegions(mraSolver.procedureToStackRegions, mraSolver.procedureToHeapRegions, mraSolver.mergeRegions, mraResult, mraSolver.procedureToSharedRegions, graSolver.getDataMap, graResult) mmm.logRegions() + Logger.debug("[!] Running VSA") + val vsaSolver = ValueSetAnalysisSolver(domain.toSet, IRProgram, mmm, constPropResult, reachingDefinitionsAnalysisResults) + val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = vsaSolver.analyze() + + config.analysisDotPath.foreach(s => { + writeToFile( + toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> vsaResult(b).toString).toMap), + s"${s}_VSA$iteration.dot" + ) + }) + Logger.debug("[!] Running Steensgaard") - val steensgaardSolver = InterprocSteensgaardAnalysis(IRProgram, constPropResult, mmm, reachingDefinitionsAnalysisResults, globalOffsets) + val steensgaardSolver = InterprocSteensgaardAnalysis(interDomain.toSet, constPropResult, mmm, reachingDefinitionsAnalysisResults, globalOffsets) steensgaardSolver.analyze() val steensgaardResults = steensgaardSolver.pointsTo() val memoryRegionContents = steensgaardSolver.getMemoryRegionContents @@ -406,10 +420,6 @@ object StaticAnalysis { val regionInjector = RegionInjector(domain, IRProgram, constPropResult, mmm, reachingDefinitionsAnalysisResults, globalOffsets) regionInjector.nodeVisitor() - Logger.debug("[!] Running VSA") - val vsaSolver = ValueSetAnalysisSolver(IRProgram, globalAddresses, externalAddresses, globalOffsets, subroutines, mmm, constPropResult) - val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = vsaSolver.analyze() - var paramResults: Map[Procedure, Set[Variable]] = Map.empty var interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]] = Map.empty @@ -545,10 +555,13 @@ object RunUtils { val result = StaticAnalysis.analyse(ctx, config, iteration) analysisResult.append(result) Logger.debug("[!] Replacing Indirect Calls") - modified = transforms.resolveIndirectCallsUsingPointsTo( - result.steensgaardResults, - result.memoryRegionContents, - result.reachingDefs, +// modified = transforms.resolveIndirectCallsUsingPointsTo( +// result.steensgaardResults, +// result.reachingDefs, +// ctx.program +// ) + modified = transforms.resolveIndirectCallsUsingVSA( + result.vsaResult, ctx.program ) Logger.debug("[!] 
Generating Procedure Summaries") From 72c238ec9cbd7a2eb59d0e39a42673523a84e36a Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Thu, 17 Oct 2024 13:12:12 +1000 Subject: [PATCH 082/104] Better Data Region Approximation --- src/main/scala/analysis/Analysis.scala | 4 +- .../scala/analysis/GlobalRegionAnalysis.scala | 7 +- src/main/scala/analysis/Lattice.scala | 4 +- .../analysis/LoopConditionEvaluator.scala | 13 --- src/main/scala/analysis/MemoryModelMap.scala | 26 ++--- src/main/scala/analysis/SSAForm.scala | 100 ------------------ .../scala/analysis/SteensgaardAnalysis.scala | 0 src/main/scala/analysis/VSA.scala | 2 +- .../analysis/solvers/AbstractSPAnalysis.scala | 95 ----------------- .../transforms/IndirectCallResolution.scala | 2 +- 10 files changed, 24 insertions(+), 229 deletions(-) delete mode 100644 src/main/scala/analysis/LoopConditionEvaluator.scala delete mode 100644 src/main/scala/analysis/SSAForm.scala delete mode 100644 src/main/scala/analysis/SteensgaardAnalysis.scala delete mode 100644 src/main/scala/analysis/solvers/AbstractSPAnalysis.scala diff --git a/src/main/scala/analysis/Analysis.scala b/src/main/scala/analysis/Analysis.scala index 822c8ec2f..b781ee1d5 100644 --- a/src/main/scala/analysis/Analysis.scala +++ b/src/main/scala/analysis/Analysis.scala @@ -169,10 +169,10 @@ trait ConstantPropagationWithSSA(val program: Program, val reachingDefs: Map[CFG // assignments case a: Assign => val lhsWrappers = s.collect { - case (k, v) if RegisterWrapperPartialEquality(k.variable, k.assigns) == RegisterWrapperPartialEquality(a.lhs, getDefinition(a.lhs, r, reachingDefs)) => (k, v) + case (k, v) if RegisterVariableWrapper(k.variable, k.assigns) == RegisterVariableWrapper(a.lhs, getDefinition(a.lhs, r, reachingDefs)) => (k, v) } if (lhsWrappers.nonEmpty) { - s ++ lhsWrappers.map((k, v) => (RegisterWrapperEqualSets(k.variable, k.assigns ++ getDefinition(a.lhs, r, reachingDefs)), v.union(eval(a.rhs, s, r)))) + s ++ lhsWrappers.map((k, v) => (k, v.union(eval(a.rhs, s, r)))) } else { s + (RegisterWrapperEqualSets(a.lhs, getDefinition(a.lhs, r, reachingDefs)) -> eval(a.rhs, s, n)) } diff --git a/src/main/scala/analysis/GlobalRegionAnalysis.scala b/src/main/scala/analysis/GlobalRegionAnalysis.scala index a48745055..5e61463a9 100644 --- a/src/main/scala/analysis/GlobalRegionAnalysis.scala +++ b/src/main/scala/analysis/GlobalRegionAnalysis.scala @@ -129,11 +129,11 @@ trait GlobalRegionAnalysis(val program: Program, returnSet = returnSet + i } else { if (accesses.size == 1) { - dataMap(i.start) = DataRegion(accesses.head.regionIdentifier, i.start, i.size.max(accesses.head.size)) + dataMap(i.start) = DataRegion(i.regionIdentifier, i.start, i.size.max(accesses.head.size)) returnSet = returnSet + dataMap(i.start) } else if (accesses.size > 1) { val highestRegion = accesses.maxBy(_.start) - dataMap(i.start) = DataRegion(accesses.head.regionIdentifier, i.start, i.size.max(highestRegion.end - i.start)) + dataMap(i.start) = DataRegion(i.regionIdentifier, i.start, i.size.max(highestRegion.end - i.start)) returnSet = returnSet + dataMap(i.start) } } @@ -153,6 +153,9 @@ trait GlobalRegionAnalysis(val program: Program, val unwrapped = unwrapExpr(assign.rhs) if (unwrapped.isDefined) { return checkIfDefined(evalMemLoadToGlobal(unwrapped.get.index, unwrapped.get.size, cmd), n) + } else { + // this is a constant but we need to check if it is a data region + return checkIfDefined(evalMemLoadToGlobal(assign.rhs, 1, cmd), n) } case _ => } diff --git a/src/main/scala/analysis/Lattice.scala 
b/src/main/scala/analysis/Lattice.scala index 34dc8f43a..7b128967d 100644 --- a/src/main/scala/analysis/Lattice.scala +++ b/src/main/scala/analysis/Lattice.scala @@ -662,8 +662,8 @@ class ValueSetLattice[T] extends Lattice[ValueSet[T]] { extension (r: DataRegion | StackRegion) def start: BigInt = r match { - case d: DataRegion => d.start.value - case s: StackRegion => s.start.value + case d: DataRegion => d.start + case s: StackRegion => s.start } def end(mmm: MemoryModelMap): BigInt = r match { diff --git a/src/main/scala/analysis/LoopConditionEvaluator.scala b/src/main/scala/analysis/LoopConditionEvaluator.scala deleted file mode 100644 index 2659c6ecd..000000000 --- a/src/main/scala/analysis/LoopConditionEvaluator.scala +++ /dev/null @@ -1,13 +0,0 @@ -//package analysis -//import ir.* -//import util.* -// -//class LoopConditionEvaluator(context: Map[CFGPosition, Map[Variable, Set[BitVecLiteral]]], reachingDefs: Map[CFGPosition, Map[Variable, Set[LocalAssign]]]) { -// def evaluate(loop: Loop): Set[BitVecLiteral] = { -// val loopCondition = loop.condition -// val loopHeader = loop.header -// val loopHeaderContext = context(loopHeader) -// val loopConditionResult = evaluateExpressionWithSSA(loopCondition, loopHeaderContext, loopHeader, reachingDefs) -// loopConditionResult -// } -//} diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index 7902a754f..ee0c0cb2d 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -151,19 +151,19 @@ class MemoryModelMap { def convertMemoryRegions(stackRegionsPerProcedure: mutable.Map[Procedure, mutable.Set[StackRegion]], heapRegions: mutable.Map[DirectCall, HeapRegion], mergeRegions: mutable.Set[Set[MemoryRegion]], allocationSites: Map[CFGPosition, Set[StackRegion]], procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]], graRegions: mutable.HashMap[BigInt, DataRegion], graResults: Map[CFGPosition, Set[DataRegion]]): Unit = { //val keepData = dataMap.filterNot((range, region) => graRegions.contains(region.start)).map((range, region) => region) -// val oldRegions = dataMap.values.toSet -// dataMap.clear() -// for (dr <- graRegions.map((_, dataRegion) => dataRegion)) { -// add(dr.start, dr) -// } -// for (dr <- oldRegions) { -// val obj = findDataObject(dr.start) -// if (obj.isEmpty) { -// Logger.debug(s"Data region $dr not found in the new data map") -// } else { -// obj.get.relfContent.add(dr.regionIdentifier) -// } -// } + val oldRegions = dataMap.values.toSet + dataMap.clear() + for (dr <- graRegions.map((_, dataRegion) => dataRegion)) { + add(dr.start, dr) + } + for (dr <- oldRegions) { + val obj = findDataObject(dr.start) + if (obj.isEmpty) { + Logger.debug(s"Data region $dr not found in the new data map") + } else { + obj.get.relfContent.add(dr.regionIdentifier) + } + } cfgPositionToDataRegion ++= graResults stackAllocationSites ++= allocationSites diff --git a/src/main/scala/analysis/SSAForm.scala b/src/main/scala/analysis/SSAForm.scala deleted file mode 100644 index 044780a6b..000000000 --- a/src/main/scala/analysis/SSAForm.scala +++ /dev/null @@ -1,100 +0,0 @@ -//package analysis -// -//import analysis.* -//import ir.{SignExtend, *} -//import util.Logger -// -//import scala.collection.mutable -// -///** Set-Based SSA -// * - Each variable has a set of versions -// * - New assignments create new versions and replaces any new versions -// * -// * NOTE: This approach does not make an attempt to handle loops -// */ 
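// A minimal standalone sketch of the set-based SSA idea described in the comment above,
// assuming a simplified setting where only variable names matter; `VersionTracker`,
// `freshVersion` and `versionsOf` are illustrative names and not part of this codebase.
class VersionTracker {
  private val maxVersion = scala.collection.mutable.HashMap[String, Int]()
  private val versions =
    scala.collection.mutable.HashMap[String, Set[Int]]().withDefaultValue(Set.empty[Int])

  // Allocate the next version number for a variable name.
  private def freshVersion(name: String): Int = {
    val v = maxVersion.getOrElse(name, 0)
    maxVersion(name) = v + 1
    v
  }

  // An assignment replaces the current version set with a fresh singleton set.
  def assign(name: String): Unit = versions(name) = Set(freshVersion(name))

  // At a join point the version sets flowing in from predecessors are unioned,
  // which is why a use may see several versions at once.
  def join(name: String, other: Set[Int]): Unit = versions(name) = versions(name) ++ other

  def versionsOf(name: String): Set[Int] = versions(name)
}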
-//class SSAForm(program: Program) { -// -// private val varMaxTracker = mutable.HashMap[String, Int]() -// private val blockBasedMappings = mutable.HashMap[(Block, String), mutable.Set[Int]]().withDefault(_ => mutable.Set()) -// private val context = mutable.HashMap[(Procedure, String), mutable.Set[Int]]().withDefault(_ => mutable.Set()) -// private def getMax(varName: String): Int = -// val ret = varMaxTracker.getOrElse(varName, 0) -// varMaxTracker(varName) = ret + 1 -// ret -// -// private def transformVariables(vars: Set[Variable], block: Block, proc: Procedure): Unit = { -// vars.foreach { v => -// if (context.contains((proc, v.name))) { -// v.sharedVariable = true -// } -// v.ssa_id.clear() -// val contextResult = context.getOrElseUpdate((proc, v.name), mutable.Set(getMax(v.name))) -// v.ssa_id.addAll(blockBasedMappings.getOrElseUpdate((block, v.name), contextResult)) -// } -// } -// -// def applySSA(): Unit = { -// for (proc <- program.procedures) { -// val visitedBlocks = mutable.Set[Block]() -// val stack = mutable.Stack[Block]() -// -// // Start with the entry block -// if (proc.entryBlock.isDefined) { -// stack.push(proc.entryBlock.get) -// } -// -// while (stack.nonEmpty) { -// val currentBlock = stack.pop() -// -// if (!visitedBlocks.contains(currentBlock)) { -// visitedBlocks.add(currentBlock) -// -// for (stmt <- currentBlock.statements) { -// Logger.debug(stmt) -// stmt match { -// case localAssign: LocalAssign => -// transformVariables(localAssign.rhs.variables, currentBlock, proc) -// val maxVal = varMaxTracker.getOrElseUpdate(localAssign.lhs.name, 0) -// blockBasedMappings((currentBlock, localAssign.lhs.name)) = mutable.Set(maxVal) -// -// localAssign.lhs.ssa_id.clear() -// localAssign.lhs.ssa_id.addAll(blockBasedMappings((currentBlock, localAssign.lhs.name))) -// -// varMaxTracker(localAssign.lhs.name) = blockBasedMappings((currentBlock, localAssign.lhs.name)).max + 1 -// -// case memoryAssign: MemoryAssign => -// transformVariables(memoryAssign.rhs.variables, currentBlock, proc) -// -// case assume: Assume => -// transformVariables(assume.body.variables, currentBlock, proc) -// // no required for analyses -// case assert: Assert => -// transformVariables(assert.body.variables, currentBlock, proc) -// // no required for analyses -// case _ => throw new RuntimeException("No SSA form for " + stmt.getClass + " yet") -// } -// } -// currentBlock.jump match { -// case directCall: DirectCall => -// // TODO: transfers the whole context but it could be using ANR and RNA to transfer only the relevant context -// varMaxTracker.keys.foreach { varr => -// //context((directCall.target, varr)) = context((directCall.target, varr)) ++ blockBasedMappings(block, varr) -// context.getOrElseUpdate((directCall.target, varr), mutable.Set()) ++= blockBasedMappings((currentBlock, varr)) -// } -// case indirectCall: IndirectCall => -// transformVariables(indirectCall.target.variables, currentBlock, proc) -// case goTo: GoTo => -// for { -// b <- goTo.targets -// varr <- varMaxTracker.keys -// } { -// blockBasedMappings((b, varr)) ++= blockBasedMappings(currentBlock, varr) -// } -// } -// // Push unvisited successors onto the stack -// stack.pushAll(currentBlock.nextBlocks) -// } -// } -// } -// } -//} diff --git a/src/main/scala/analysis/SteensgaardAnalysis.scala b/src/main/scala/analysis/SteensgaardAnalysis.scala deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/main/scala/analysis/VSA.scala b/src/main/scala/analysis/VSA.scala index b3b3a4452..bbdcb1d60 100644 --- 
a/src/main/scala/analysis/VSA.scala +++ b/src/main/scala/analysis/VSA.scala @@ -83,7 +83,7 @@ trait ValueSetAnalysis(domain: Set[CFGPosition], case Some(v: Variable) => m = m + (localAssign.lhs -> m(v)) case None => - Logger.debug(s"Too Complex: $localAssign.rhs") // do nothing + Logger.debug(s"Too Complex: ${localAssign.rhs}") // do nothing } } m diff --git a/src/main/scala/analysis/solvers/AbstractSPAnalysis.scala b/src/main/scala/analysis/solvers/AbstractSPAnalysis.scala deleted file mode 100644 index 36efb4a9d..000000000 --- a/src/main/scala/analysis/solvers/AbstractSPAnalysis.scala +++ /dev/null @@ -1,95 +0,0 @@ -//package analysis.solvers -// -//import ir.* -//import analysis.solvers._ -//import analysis.* -// -//import scala.collection.immutable -//import scala.collection.mutable -// -// -//class AbstractSP(val locations: Set[BitVecLiteral], val definitions: Set[LocalAssign]) { -// override def toString: String = "AbstractSP(" + location + ")" -// -// def add(that: BitVecLiteral, definer: Set[LocalAssign]): AbstractSP = { -// val newLocations = locations.map(l => BitVectorEval.smt_bvadd(l, that)) -// AbstractSP(newLocations, definer) -// } -// -// def sub(that: BitVecLiteral, definer: Set[LocalAssign]): AbstractSP = { -// val newLocations = locations.map(l => BitVectorEval.smt_bvsub(l, that)) -// AbstractSP(newLocations, definer) -// } -// -// def union(that: AbstractSP): AbstractSP = { -// AbstractSP(locations ++ that.locations, definitions ++ that.definitions) -// } -//} -// -//class TopAbstractSP extends AbstractSP(Set.empty, Set.empty) { -// override def toString: String = "TopAbstractSP" -//} -// -// -///** -// * Tracks the stack pointer abstractly and offers calculations for the stack pointer. -// * Uses -// */ -//trait AbstractSPAnalysis(program: Program, constantProp: Map[CFGPosition, Map[RegisterWrapperPartialEquality, Set[BitVecLiteral]]]) { -// -// val mapLattice: MapLattice[RegisterWrapperPartialEquality, FlatElement[AbstractSP], FlatLattice[AbstractSP]] = MapLattice(AbstractSPLattice()) -// -// val lattice: MapLattice[CFGPosition, Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]], MapLattice[RegisterWrapperPartialEquality, FlatElement[AbstractSP], FlatLattice[AbstractSP]]] = MapLattice(mapLattice) -// -// val domain: Set[CFGPosition] = Set.empty ++ program -// -// private val stackPointer = Register("R31", BitVecType(64)) -// -// /** Default implementation of eval. -// */ -// def eval(cmd: Command, s: Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]]): Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]] = { -// -// } -// -// /** Transfer function for state lattice elements. 
-// */ -// def localTransfer(n: CFGPosition, s: Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]]): Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]] = n match { -// case r: Command => -// r match { -// // assignments -// case la: LocalAssign => -// if (la.lhs == stackPointer) { -// val reachingDefs = getDefinition(la.lhs, n, reachingDefs) -// val rhs = eval(la.rhs, s, n, reachingDefs) -// val rhsLocations = rhs.locations -// val rhsDefinitions = rhs.definitions -// val lhs = AbstractSP(rhsLocations, rhsDefinitions) -// s + (la.lhs -> FlatEl(lhs)) -// } else { -// s + (la.lhs -> eval(la.rhs, s)) -// } -// -// val lhsWrappers = s.collect { -// case (k, v) if RegisterWrapperPartialEquality(k.variable, k.assigns) == RegisterWrapperPartialEquality(la.lhs, getDefinition(la.lhs, r, reachingDefs)) => (k, v) -// } -// if (lhsWrappers.nonEmpty) { -// s ++ lhsWrappers.map((k, v) => (RegisterWrapperEqualSets(k.variable, k.assigns ++ getDefinition(la.lhs, r, reachingDefs)), v.union(eval(la.rhs, s, r)))) -// } else { -// s + (RegisterWrapperEqualSets(la.lhs, getDefinition(la.lhs, r, reachingDefs)) -> eval(la.rhs, s, n)) -// } -// // all others: like no-ops -// case _ => s -// } -// case _ => s -// } -// -// /** Transfer function for state lattice elements. -// */ -// def transfer(n: CFGPosition, s: Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]]): Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]] = localTransfer(n, s) -//} -// -//class AbstractSPAnalysisSolver(program: Program, constantProp: Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]]) extends AbstractSPAnalysis(program, constantProp) -// with IRIntraproceduralForwardDependencies -// with Analysis[Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]]] -// with SimpleWorklistFixpointSolver[CFGPosition, Map[RegisterWrapperPartialEquality, FlatElement[AbstractSP]], MapLattice[RegisterWrapperPartialEquality, FlatElement[AbstractSP], FlatLattice[AbstractSP]]] { -//} \ No newline at end of file diff --git a/src/main/scala/ir/transforms/IndirectCallResolution.scala b/src/main/scala/ir/transforms/IndirectCallResolution.scala index f0f9c338b..c4c9d894b 100644 --- a/src/main/scala/ir/transforms/IndirectCallResolution.scala +++ b/src/main/scala/ir/transforms/IndirectCallResolution.scala @@ -202,7 +202,7 @@ def resolveIndirectCallsUsingVSA( case _ => case None => case dataRegion: DataRegion => - names.add(dataRegion.regionIdentifier) + names.addAll(dataRegion.relfContent) vsaResult.get(n) match case Some(value) => value match case Lift(el) => el.get(dataRegion) match From 874b5b09540db15b6aad6b15bf9f336479577f4c Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Thu, 17 Oct 2024 18:01:37 +1000 Subject: [PATCH 083/104] VSA results feedback --- .../scala/analysis/GlobalRegionAnalysis.scala | 75 +++++- src/main/scala/analysis/MemoryModelMap.scala | 19 +- .../scala/analysis/MemoryRegionAnalysis.scala | 12 +- src/main/scala/analysis/RegionInjector.scala | 221 ++---------------- src/main/scala/analysis/VSA.scala | 8 +- src/main/scala/util/RunUtils.scala | 13 +- 6 files changed, 113 insertions(+), 235 deletions(-) diff --git a/src/main/scala/analysis/GlobalRegionAnalysis.scala b/src/main/scala/analysis/GlobalRegionAnalysis.scala index 5e61463a9..e85940136 100644 --- a/src/main/scala/analysis/GlobalRegionAnalysis.scala +++ b/src/main/scala/analysis/GlobalRegionAnalysis.scala @@ -10,7 +10,8 @@ trait GlobalRegionAnalysis(val program: Program, val constantProp: Map[CFGPosition, Map[Variable, 
FlatElement[BitVecLiteral]]], val reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], val mmm: MemoryModelMap, - val globalOffsets: Map[BigInt, BigInt]) { + val globalOffsets: Map[BigInt, BigInt], + val vsaResult: Option[Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]]) { var dataCount: Int = 0 private def nextDataCount() = { @@ -48,6 +49,21 @@ trait GlobalRegionAnalysis(val program: Program, def getDataMap: mutable.HashMap[BigInt, DataRegion] = dataMap + def resolveGlobalOffsetSecondLast(address: BigInt): BigInt = { + var tableAddress = address + // addresses may be layered as in jumptable2 example for which recursive search is required + var exitLoop = false + while (globalOffsets.contains(tableAddress) && globalOffsets.contains(globalOffsets(tableAddress)) && !exitLoop) { + val newAddress = globalOffsets.getOrElse(tableAddress, tableAddress) + if (newAddress == tableAddress) { + exitLoop = true + } else { + tableAddress = newAddress + } + } + tableAddress + } + def tryCoerceIntoData(exp: Expr, n: Command, subAccess: BigInt): Set[DataRegion] = { val eval = evaluateExpression(exp, constantProp(n)) if (eval.isDefined) { @@ -69,13 +85,15 @@ trait GlobalRegionAnalysis(val program: Program, val firstArg = tryCoerceIntoData(arg1, n, subAccess) var regions = Set.empty[DataRegion] for (i <- firstArg) { - if (globalOffsets.contains(i.start)) { - val newExpr = BinaryExpr(op, BitVecLiteral(globalOffsets(i.start), evalArg2.get.size), evalArg2.get) - regions = regions ++ tryCoerceIntoData(newExpr, n, subAccess) - } else { - val newExpr = BinaryExpr(op, BitVecLiteral(i.start, evalArg2.get.size), evalArg2.get) - regions = regions ++ tryCoerceIntoData(newExpr, n, subAccess) - } +// if (globalOffsets.contains(i.start) && globalOffsets.contains(globalOffsets(i.start))) { // get the first base address +// val newExpr = BinaryExpr(op, BitVecLiteral(globalOffsets(i.start), evalArg2.get.size), evalArg2.get) +// regions = regions ++ tryCoerceIntoData(newExpr, n, subAccess) +// } else { +// val newExpr = BinaryExpr(op, BitVecLiteral(i.start, evalArg2.get.size), evalArg2.get) +// regions = regions ++ tryCoerceIntoData(newExpr, n, subAccess) +// } + val newExpr = BinaryExpr(op, BitVecLiteral(resolveGlobalOffsetSecondLast(i.start), evalArg2.get.size), evalArg2.get) + regions = regions ++ tryCoerceIntoData(newExpr, n, subAccess) } return regions } @@ -87,7 +105,39 @@ trait GlobalRegionAnalysis(val program: Program, var collage = Set.empty[DataRegion] for (i <- ctx) { if (i != n) { - val tryVisit = localTransfer(i, Set.empty) + var tryVisit = Set.empty[DataRegion] + if (vsaResult.isDefined) { + vsaResult.get.get(i) match + case Some(value) => value match + case Lift(el) => el.get(i.lhs) match + case Some(value) => value.map { + case addressValue: AddressValue => + // find what the region contains + vsaResult.get.get(i) match + case Some(value) => value match + case Lift(el) => el.get(addressValue.region) match + case Some(value) => value.map { + case addressValue: AddressValue => + addressValue.region match + case region: DataRegion => + tryVisit = tryVisit + region + case _ => + case literalValue: LiteralValue => + } + case None => + case LiftedBottom => + case _ => + case None => + case literalValue: LiteralValue => + } + case None => + case LiftedBottom => + case _ => + case None => + } + if (tryVisit.isEmpty) { + tryVisit = localTransfer(i, Set.empty) + } if (tryVisit.nonEmpty) { collage = collage ++ tryVisit } @@ -120,7 +170,7 @@ trait 
GlobalRegionAnalysis(val program: Program, * @param n CFGPosition * @return Set[DataRegion] */ - private def checkIfDefined(dataRegions: Set[DataRegion], n: CFGPosition): Set[DataRegion] = { + def checkIfDefined(dataRegions: Set[DataRegion], n: CFGPosition): Set[DataRegion] = { var returnSet = Set.empty[DataRegion] for (i <- dataRegions) { val (f, p) = mmm.findDataObjectWithSize(i.start, i.size) @@ -173,8 +223,9 @@ class GlobalRegionAnalysisSolver( constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], mmm: MemoryModelMap, - globalOffsets: Map[BigInt, BigInt] - ) extends GlobalRegionAnalysis(program, domain, constantProp, reachingDefs, mmm, globalOffsets) + globalOffsets: Map[BigInt, BigInt], + vsaResult: Option[Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]] + ) extends GlobalRegionAnalysis(program, domain, constantProp, reachingDefs, mmm, globalOffsets, vsaResult) with IRIntraproceduralForwardDependencies with Analysis[Map[CFGPosition, Set[DataRegion]]] with SimpleWorklistFixpointSolver[CFGPosition, Set[DataRegion], PowersetLattice[DataRegion]] \ No newline at end of file diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index ee0c0cb2d..3d640a0d3 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -113,12 +113,11 @@ class MemoryModelMap { * This is because when regions are found, the relocated address is used and as such match * the correct range. * - * @param name * @param address * @param globalOffsets * @return BitVector: a BitVector representing the actual address */ - private def resolveInverseGlobalOffset(name: String, address: BigInt, globalOffsets: Map[BigInt, BigInt]): BigInt = { + private def resolveInverseGlobalOffset(address: BigInt, globalOffsets: Map[BigInt, BigInt]): BigInt = { val inverseGlobalOffsets = globalOffsets.map(_.swap) var tableAddress = inverseGlobalOffsets.getOrElse(address, address) // addresses may be layered as in jumptable2 example for which recursive search is required @@ -131,13 +130,12 @@ class MemoryModelMap { tableAddress = newAddress } } - tableAddress } def preLoadGlobals(externalFunctions: Map[BigInt, String], globalOffsets: Map[BigInt, BigInt], globalAddresses: Map[BigInt, String], globalSizes: Map[String, Int]): Unit = { // map externalFunctions name, value to DataRegion(name, value) and then sort by value - val reversedExternalFunctionRgns = externalFunctions.map((offset, name) => resolveInverseGlobalOffset(name, offset, globalOffsets) -> name) + val reversedExternalFunctionRgns = externalFunctions.map((offset, name) => resolveInverseGlobalOffset(offset, globalOffsets) -> name) val filteredGlobalOffsets = globalAddresses.filterNot((offset, name) => reversedExternalFunctionRgns.contains(offset)) val externalFunctionRgns = (reversedExternalFunctionRgns ++ filteredGlobalOffsets).map((offset, name) => DataRegion(name, offset, (globalSizes.getOrElse(name, 1).toDouble / 8).ceil.toInt)) @@ -188,8 +186,19 @@ class MemoryModelMap { for (regions <- mergeRegions) { uf.bulkUnion(regions) } + + /* this is done because the stack regions will change after MMM transforms them + and merges some of them based on size, thus we need to alter the results of + the analysis to match MMM transformations + TODO: Can this be done directly in MRA? 
+ */ + for ((n, stacks) <- stackAllocationSites) { + pushContext(IRWalk.procedure(n).name) + stackAllocationSites(n) = stacks.map(r => findStackObject(r.start).getOrElse(r)) + pushContext(IRWalk.procedure(n).name) + } } - // TODO: push and pop could be optimised by caching the results + def pushContext(funName: String): Unit = { contextStack.push(funName) stackMap.clear() diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index 25170ef38..8067ac889 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -37,20 +37,20 @@ trait MemoryRegionAnalysis(val program: Program, * Controls the pool of stack regions. Each pool is unique to a function. * If the offset has already been defined in the context of the function, then the same region is returned. * - * @param expr : the offset + * @param base : the offset * @param parent : the function entry node * @return the stack region corresponding to the offset */ - private def poolMaster(expr: BigInt, stackBase: Procedure, subAccess: BigInt): StackRegion = { + private def poolMaster(base: BigInt, stackBase: Procedure, subAccess: BigInt): StackRegion = { assert(subAccess >= 0) val stackPool = stackMap.getOrElseUpdate(stackBase, mutable.HashMap()) var region: StackRegion = null - if (stackPool.contains(expr)) { - region = stackPool(expr) + if (stackPool.contains(base)) { + region = stackPool(base) } else { - val newRegion = StackRegion(nextStackCount(), expr, stackBase) + val newRegion = StackRegion(nextStackCount(), base, stackBase) addReturnStack(stackBase, newRegion) - stackPool += (expr -> newRegion) + stackPool += (base -> newRegion) region = newRegion } region.subAccesses.add((subAccess.toDouble/8).ceil.toInt) diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala index 5e0975fcf..04394f162 100644 --- a/src/main/scala/analysis/RegionInjector.scala +++ b/src/main/scala/analysis/RegionInjector.scala @@ -24,205 +24,6 @@ class RegionInjector(domain: mutable.Set[CFGPosition], program.readOnlyMemory = transformMemorySections(program.readOnlyMemory) } - /** - * In expressions that have accesses within a region, we need to relocate - * the base address to the actual address using the relocation table. - * MUST RELOCATE because MMM iterate to find the lowest address - * TODO: May need to iterate over the relocation table to find the actual address - * - * @param address - * @param globalOffsets - * @return BitVecLiteral: the relocated address - */ - def relocatedBase(address: BigInt, globalOffsets: Map[BigInt, BigInt]): BitVecLiteral = { - val tableAddress = globalOffsets.getOrElse(address, address) - // this condition checks if the address is not layered and returns if it is not - if (tableAddress != address && !globalOffsets.contains(tableAddress)) { - return BitVecLiteral(address, 64) - } - BitVecLiteral(tableAddress, 64) - } - - /** - * Used to reduce an expression that may be a sub-region of a memory region. - * Pointer reduction example: - * R2 = R31 + 20 - * Mem[R2 + 8] <- R1 - * - * Steps: - * 1) R2 = R31 + 20 <- ie. stack access (assume R31 = stackPointer) - * ↓ - * R2 = StackRegion("stack_1", 20) - * - * 2) Mem[R2 + 8] <- R1 <- ie. 
memStore - * ↓ - * (StackRegion("stack_1", 20) + 8) <- R1 - * ↓ - * MMM.get(20 + 8) <- R1 - * - * @param binExpr - * @param n - * @return Set[MemoryRegion]: a set of regions that the expression may be pointing to - */ - def reducibleToRegion(binExpr: BinaryExpr, n: Command): Set[MemoryRegion] = { - var reducedRegions = Set.empty[MemoryRegion] - binExpr.arg1 match { - case variable: Variable => - val b = evaluateExpression(binExpr, constantProp(n)) - if (b.isDefined) { - val region = mmm.findDataObject(b.get.value) - reducedRegions = reducedRegions ++ region - } - if (reducedRegions.nonEmpty) { - return reducedRegions - } - val ctx = getUse(variable, n, reachingDefs) - for (i <- ctx) { - if (i != n) { // handles loops (ie. R19 = R19 + 1) %00000662 in jumptable2 - val regions = i.rhs match { - case loadL: MemoryLoad => - val foundRegions = exprToRegion(loadL.index, i) - val toReturn = mutable.Set[MemoryRegion]().addAll(foundRegions) - for { - f <- foundRegions - } { - // TODO: Must enable this (probably need to calculate those contents beforehand) -// if (memoryRegionContents.contains(f)) { -// memoryRegionContents(f).foreach { -// case b: BitVecLiteral => -// // val region = mmm.findDataObject(b.value) -// // if (region.isDefined) { -// // toReturn.addOne(region.get) -// // } -// case r: MemoryRegion => -// toReturn.addOne(r) -// toReturn.remove(f) -// } -// } - } - toReturn.toSet - case _: BitVecLiteral => - Set.empty[MemoryRegion] - case _ => - //println(s"Unknown expression: ${i}") - //println(ctx) - exprToRegion(i.rhs, i) - } - val result = evaluateExpression(binExpr.arg2, constantProp(n)) - if (result.isDefined) { - val b = result.get - for { - r <- regions - } { - r match { - case stackRegion: StackRegion => - //println(s"StackRegion: ${stackRegion.start}") - //println(s"BitVecLiteral: ${b}") - //if (b.size == stackRegion.start.size) { TODO: Double check why this is needed - val nextOffset = bitVectorOpToBigIntOp(binExpr.op, stackRegion.start, b.value) - reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, BitVecLiteral(nextOffset, 64)), n) - //} - case dataRegion: DataRegion => - //val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start, globalOffsets), b) - val nextOffset = bitVectorOpToBigIntOp(binExpr.op, dataRegion.start, b.value) - reducedRegions ++= exprToRegion(BitVecLiteral(nextOffset, 64), n) - case _ => - } - } - } - } - } - case _ => - } - reducedRegions - } - - /** - * Finds a region for a given expression using MMM results - * - * @param expr - * @param n - * @return Set[MemoryRegion]: a set of regions that the expression may be pointing to - */ - def exprToRegion(expr: Expr, n: Command): Set[MemoryRegion] = { - var res = Set[MemoryRegion]() - mmm.popContext() - mmm.pushContext(IRWalk.procedure(n).name) - expr match { // TODO: Stack detection here should be done in a better way or just merged with data - case binOp: BinaryExpr if binOp.arg1 == stackPointer => - val b = evaluateExpression(binOp.arg2, constantProp(n)) - if (b.isDefined) { - if binOp.arg2.variables.exists { v => v.sharedVariable } then { - Logger.debug("Shared stack object: " + b) - Logger.debug("Shared in: " + expr) - val regions = mmm.findSharedStackObject(b.get.value) - Logger.debug("found: " + regions) - res ++= regions - } else { - if (isNegative(b.get)) { - val region = mmm.findStackObject(0) - if (region.isDefined) { - res = res + region.get - } - } - val region = mmm.findStackObject(b.get.value) - if (region.isDefined) { - res = res + region.get - } - } - } - case 
binaryExpr: BinaryExpr => - res ++= reducibleToRegion(binaryExpr, n) - case v: Variable if v == stackPointer => - res ++= mmm.findStackObject(0) - case v: Variable => - val b = evaluateExpression(expr, constantProp(n)) - if (b.isDefined) { - Logger.debug("BitVecLiteral: " + b) - val region = mmm.findDataObject(b.get.value) - if (region.isDefined) { - res += region.get - } - } - if (res.isEmpty) { - val ctx = getDefinition(v, n, reachingDefs) - for (i <- ctx) { - i.rhs match { - case be: BinaryExpr => - res = res ++ exprToRegion(eval(i.rhs, i), n) - case _ => - } - } - } - - if (res.isEmpty) { // may be passed as param - val ctx = getUse(v, n, reachingDefs) - for (i <- ctx) { - i.rhs match { - case load: MemoryLoad => // treat as a region - res ++= exprToRegion(load.index, i) - case binaryExpr: BinaryExpr => - res ++= reducibleToRegion(binaryExpr, i) - case _ => // also treat as a region (for now) even if just Base + Offset without memLoad - res ++= exprToRegion(i.rhs, i) - } - } - } - case load: MemoryLoad => // treat as a region - res ++= exprToRegion(load.index, n) - case _ => - val b = evaluateExpression(expr, constantProp(n)) - if (b.isDefined) { - Logger.debug("BitVecLiteral: " + b) - val region = mmm.findDataObject(b.get.value) - if (region.isDefined) { - res += region.get - } - } - } - res - } - /** Default implementation of eval. */ def eval(expr: Expr, cmd: Command): Expr = { @@ -245,12 +46,23 @@ class RegionInjector(domain: mutable.Set[CFGPosition], BinaryExpr(op, eval(arg1, cmd), eval(arg2, cmd)) case MemoryLoad(mem, index, endian, size) => // TODO: index should be replaced region - MemoryLoad(renameMemory(mem, index, cmd), eval(index, cmd), endian, size) + MemoryLoad(renameMemory(mem, cmd), eval(index, cmd), endian, size) case variable: Variable => variable // ignore variables } - def renameMemory(mem: Memory, expr: Expr, cmd : Command): Memory = { - val regions = exprToRegion(eval(expr, cmd), cmd) + def nodeToRegion(n: CFGPosition): Set[MemoryRegion] = { + var returnRegions = Set.empty[MemoryRegion] + n match { + case directCall: DirectCall => + returnRegions = returnRegions + mmm.getHeap(directCall).asInstanceOf[MemoryRegion] + case _ => + returnRegions = returnRegions ++ mmm.getStack(n).asInstanceOf[Set[MemoryRegion]] ++ mmm.getData(n).asInstanceOf[Set[MemoryRegion]] + } + returnRegions + } + + def renameMemory(mem: Memory, cmd : Command): Memory = { + val regions = nodeToRegion(cmd) if (regions.size == 1) { Logger.debug(s"Mem CMD is: ${cmd}") Logger.debug(s"Region found for mem: ${regions.head}") @@ -271,8 +83,7 @@ class RegionInjector(domain: mutable.Set[CFGPosition], // case _ => // } } else { - Logger.debug(s"Mem CMD is: ${cmd}") - Logger.debug(s"No region found for expr ${expr} regions size is ${regions.size}") + Logger.debug(s"No region found for cmd ${cmd} regions size is ${regions.size}") } mem } @@ -283,7 +94,7 @@ class RegionInjector(domain: mutable.Set[CFGPosition], case assign: Assign => assign.rhs = eval(assign.rhs, assign) case mAssign: MemoryAssign => - mAssign.mem = renameMemory(mAssign.mem, mAssign.index, mAssign) + mAssign.mem = renameMemory(mAssign.mem, mAssign) mAssign.index = eval(mAssign.index, mAssign) mAssign.value = eval(mAssign.value, mAssign) case assert: Assert => diff --git a/src/main/scala/analysis/VSA.scala b/src/main/scala/analysis/VSA.scala index bbdcb1d60..7c03e27f6 100644 --- a/src/main/scala/analysis/VSA.scala +++ b/src/main/scala/analysis/VSA.scala @@ -50,8 +50,8 @@ trait ValueSetAnalysis(domain: Set[CFGPosition], returnRegions } - def 
canCoerceIntoDataRegion(bitVecLiteral: BitVecLiteral): Option[DataRegion] = { - mmm.isDataBase(bitVecLiteral.value) + def canCoerceIntoDataRegion(bitVecLiteral: BitVecLiteral, size: Int): Option[DataRegion] = { + mmm.findDataObject(bitVecLiteral.value) } /** Default implementation of eval. @@ -71,7 +71,7 @@ trait ValueSetAnalysis(domain: Set[CFGPosition], } else { evaluateExpression(localAssign.rhs, constantProp(n)) match case Some(bitVecLiteral: BitVecLiteral) => - val possibleData = canCoerceIntoDataRegion(bitVecLiteral) + val possibleData = canCoerceIntoDataRegion(bitVecLiteral, 1) if (possibleData.isDefined) { m = m + (localAssign.lhs -> Set(AddressValue(possibleData.get))) } else { @@ -92,7 +92,7 @@ trait ValueSetAnalysis(domain: Set[CFGPosition], evaluateExpression(memAssign.value, constantProp(n)) match case Some(bitVecLiteral: BitVecLiteral) => regions.foreach { r => - val possibleData = canCoerceIntoDataRegion(bitVecLiteral) + val possibleData = canCoerceIntoDataRegion(bitVecLiteral, memAssign.size) if (possibleData.isDefined) { m = m + (r -> Set(AddressValue(possibleData.get))) } else { diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 32fbecd01..93e60bd12 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -274,7 +274,8 @@ object StaticAnalysis { def analyse( ctx: IRContext, config: StaticAnalysisConfig, - iteration: Int + iteration: Int, + previousResults: Option[StaticAnalysisContext] = None ): StaticAnalysisContext = { val IRProgram: Program = ctx.program val externalFunctions: Set[ExternalFunction] = ctx.externalFunctions @@ -364,7 +365,13 @@ object StaticAnalysis { val mmm = MemoryModelMap() mmm.preLoadGlobals(mergedSubroutines, globalOffsets, globalAddresses, globalSizes) - val graSolver = GlobalRegionAnalysisSolver(IRProgram, domain.toSet, constPropResult, reachingDefinitionsAnalysisResults, mmm, globalOffsets) + var previousVSAResults = Option.empty[Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]] + if (previousResults.isDefined) { + previousVSAResults = Some(previousResults.get.vsaResult) + } + + Logger.debug("[!] Running GRA") + val graSolver = GlobalRegionAnalysisSolver(IRProgram, domain.toSet, constPropResult, reachingDefinitionsAnalysisResults, mmm, globalOffsets, previousVSAResults) val graResult = graSolver.analyze() Logger.debug("[!] Running MRA") @@ -552,7 +559,7 @@ object RunUtils { val analysisResult = mutable.ArrayBuffer[StaticAnalysisContext]() while (modified) { Logger.debug("[!] Running Static Analysis") - val result = StaticAnalysis.analyse(ctx, config, iteration) + val result = StaticAnalysis.analyse(ctx, config, iteration, analysisResult.lastOption) analysisResult.append(result) Logger.debug("[!] 
Replacing Indirect Calls") // modified = transforms.resolveIndirectCallsUsingPointsTo( From 71f1cba883e2bfaecd808dad6ccaae7f03db3141 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Tue, 22 Oct 2024 07:54:28 +1000 Subject: [PATCH 084/104] Handling multiple regions --- src/main/scala/analysis/ActualVSAold.scala | 60 ----------------- .../scala/analysis/GlobalRegionAnalysis.scala | 15 +++++ src/main/scala/analysis/MemoryModelMap.scala | 65 ++++++++++++++++++- .../scala/analysis/MemoryRegionAnalysis.scala | 12 ++-- src/main/scala/analysis/RegionInjector.scala | 17 ++--- 5 files changed, 94 insertions(+), 75 deletions(-) delete mode 100644 src/main/scala/analysis/ActualVSAold.scala diff --git a/src/main/scala/analysis/ActualVSAold.scala b/src/main/scala/analysis/ActualVSAold.scala deleted file mode 100644 index 2de052c1c..000000000 --- a/src/main/scala/analysis/ActualVSAold.scala +++ /dev/null @@ -1,60 +0,0 @@ -//package analysis -// -//import ir.* -//import analysis.solvers._ -// -//import scala.collection.immutable -// -//trait ActualVSA(program: Program) { -// -// val powersetLattice: PowersetLattice[Variable] = PowersetLattice() -// -// val lattice: MapLattice[CFGPosition, Set[Variable], PowersetLattice[Variable]] = MapLattice(powersetLattice) -// -// val domain: Set[CFGPosition] = Set.empty ++ program -// -// private val stackPointer = Register("R31", BitVecType(64)) -// private val linkRegister = Register("R30", BitVecType(64)) -// private val framePointer = Register("R29", BitVecType(64)) -// -// private val ignoreRegions: Set[Expr] = Set(linkRegister, framePointer, stackPointer) -// -// /** Default implementation of eval. -// */ -// def eval(cmd: Command, s: Set[Variable]): Set[Variable] = { -// var m = s -// cmd match { -// case assume: Assume => -// m.diff(assume.body.variables) -// case assert: Assert => -// m.diff(assert.body.variables) -// case memoryAssign: MemoryAssign => -// m.diff(memoryAssign.lhs.variables ++ memoryAssign.rhs.variables) -// case indirectCall: IndirectCall => -// m - indirectCall.target -// case localAssign: LocalAssign => -// m = m.diff(localAssign.rhs.variables) -// if ignoreRegions.contains(localAssign.lhs) then m else m + localAssign.lhs -// case _ => -// m -// } -// } -// -// /** Transfer function for state lattice elements. -// */ -// def localTransfer(n: CFGPosition, s: Set[Variable]): Set[Variable] = n match { -// case cmd: Command => -// eval(cmd, s) -// case _ => s // ignore other kinds of nodes -// } -// -// /** Transfer function for state lattice elements. 
-// */ -// def transfer(n: CFGPosition, s: Set[Variable]): Set[Variable] = localTransfer(n, s) -//} -// -//class ANRAnalysisSolver(program: Program) extends ANRAnalysis(program) -// with IRIntraproceduralForwardDependencies -// with Analysis[Map[CFGPosition, Set[Variable]]] -// with SimpleWorklistFixpointSolver[CFGPosition, Set[Variable], PowersetLattice[Variable]] { -//} \ No newline at end of file diff --git a/src/main/scala/analysis/GlobalRegionAnalysis.scala b/src/main/scala/analysis/GlobalRegionAnalysis.scala index e85940136..ab2a46d8a 100644 --- a/src/main/scala/analysis/GlobalRegionAnalysis.scala +++ b/src/main/scala/analysis/GlobalRegionAnalysis.scala @@ -159,6 +159,18 @@ trait GlobalRegionAnalysis(val program: Program, tryCoerceIntoData(index, n, size) } +// def mergeRegions(regions: Set[DataRegion]): DataRegion = { +// if (regions.size == 1) { +// return regions.head +// } +// val start = regions.minBy(_.start).start +// val end = regions.maxBy(_.end).end +// val size = end - start +// val newRegion = DataRegion(nextDataCount(), start, size) +// regions.foreach(i => dataMap(i.start) = newRegion) +// newRegion +// } + /** * Check if the data region is defined. * Finds full and partial matches @@ -188,6 +200,9 @@ trait GlobalRegionAnalysis(val program: Program, } } } + if (returnSet.size > 1) { + mmm.addMergeRegions(returnSet.asInstanceOf[Set[MemoryRegion]], nextDataCount()) + } returnSet } diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index 3d640a0d3..31ed7781e 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -4,8 +4,12 @@ import analysis.* import ir.* import util.Logger +import scala.collection.immutable.TreeMap import scala.collection.mutable +enum MemoryType: + case Data, Heap, Stack + // Define a case class to represent a range case class RangeKey(start: BigInt, end: BigInt) extends Ordered[RangeKey]: val size: BigInt = end - start + 1 @@ -33,10 +37,56 @@ class MemoryModelMap { private val dataMap: mutable.Map[RangeKey, DataRegion] = mutable.TreeMap() private val cfgPositionToDataRegion: mutable.Map[CFGPosition, Set[DataRegion]] = mutable.Map() private val heapCalls: mutable.Map[DirectCall, HeapRegion] = mutable.Map() + private val mergedRegions: mutable.Map[Set[MemoryRegion], String] = mutable.Map() private val stackAllocationSites: mutable.Map[CFGPosition, Set[StackRegion]] = mutable.Map() private val uf = new UnionFind() + private var DataMemory, HeapMemory, StackMemory = TreeMap[BigInt, Array[Byte]]() + + + + // Store operation: store BigInt value at a BigInt address + def store(address: BigInt, value: BigInt, memoryType: MemoryType): Unit = { + val byteArray = value.toByteArray + memoryType match + case MemoryType.Data => DataMemory += (address -> byteArray) + case MemoryType.Heap => HeapMemory += (address -> byteArray) + case MemoryType.Stack => StackMemory += (address -> byteArray) + } + + // Load operation: load from a BigInt address with a specific size + def load(address: BigInt, size: Int, memoryType: MemoryType): BigInt = { + val memory = memoryType match + case MemoryType.Data => DataMemory + case MemoryType.Heap => HeapMemory + case MemoryType.Stack => StackMemory + // Find the memory block that contains the starting address + val floorEntry = memory.rangeTo(address).lastOption + + floorEntry match { + case Some((startAddress, byteArray)) => + val offset = (address - startAddress).toInt // Offset within the byte array + // If the load exceeds 
the stored data, we need to handle padding with zeros + if (offset >= byteArray.length) { + BigInt(0) + } else { + // Calculate how much data we can retrieve + val availableSize = byteArray.length - offset + // Slice the available data, and if requested size exceeds, append zeros + val result = byteArray.slice(offset, offset + size) + val paddedResult = if (size > availableSize) { + result ++ Array.fill(size - availableSize)(0.toByte) // Padding with zeros + } else { + result + } + BigInt(1, paddedResult) // Convert the byte array back to BigInt + } + case None => + // If no memory is stored at the requested address, return zero + BigInt(0) // TODO: may need to be sm else + } + } /** Add a range and object to the mapping * @@ -49,7 +99,7 @@ class MemoryModelMap { def maxSize(r: MemoryRegion): BigInt = { r match case DataRegion(regionIdentifier, start, size) => start + size - case HeapRegion(regionIdentifier, size, parent) => ??? + case HeapRegion(regionIdentifier, start, size, parent) => ??? case StackRegion(regionIdentifier, start, parent) => if (r.subAccesses.nonEmpty) { val max = start + r.subAccesses.max @@ -159,6 +209,8 @@ class MemoryModelMap { if (obj.isEmpty) { Logger.debug(s"Data region $dr not found in the new data map") } else { + val address = dr.start + val size = dr.size obj.get.relfContent.add(dr.regionIdentifier) } } @@ -506,10 +558,19 @@ class MemoryModelMap { def getData(cfgPosition: CFGPosition): Set[DataRegion] = { cfgPositionToDataRegion.getOrElse(cfgPosition, Set.empty).map(returnRegion) } + + def addMergeRegions(regions: Set[MemoryRegion], name: String): Unit = { + mergedRegions(regions) = name + } + + def getMergedName(regions: Set[MemoryRegion]): String = { + mergedRegions(regions) + } } trait MemoryRegion { val regionIdentifier: String + val start: BigInt val subAccesses: mutable.Set[BigInt] = mutable.Set() } @@ -517,7 +578,7 @@ case class StackRegion(override val regionIdentifier: String, start: BigInt, par override def toString: String = s"Stack($regionIdentifier, $start, ${parent.name}, $subAccesses)" } -case class HeapRegion(override val regionIdentifier: String, size: BigInt, parent: Procedure) extends MemoryRegion { +case class HeapRegion(override val regionIdentifier: String, start: BigInt, size: BigInt, parent: Procedure) extends MemoryRegion { override def toString: String = s"Heap($regionIdentifier, $size)" } diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index 8067ac889..362d3b9cc 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -19,13 +19,14 @@ trait MemoryRegionAnalysis(val program: Program, val reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], val graResult: Map[CFGPosition, Set[DataRegion]]) { - var mallocCount: Int = 0 + var mallocCount: BigInt = 0 private var stackCount: Int = 0 val stackMap: mutable.Map[Procedure, mutable.Map[BigInt, StackRegion]] = mutable.Map() - private def nextMallocCount() = { - mallocCount += 1 - s"malloc_$mallocCount" + private def nextMallocCount(size: BigInt) = { + val start = mallocCount + mallocCount += (size.toDouble/8).ceil.toInt + 1 + (s"malloc_$mallocCount", start) } private def nextStackCount() = { @@ -216,7 +217,8 @@ trait MemoryRegionAnalysis(val program: Program, evaluateExpression(mallocVariable, constantProp(n)) match { case Some(b: BitVecLiteral) => val negB = if isNegative(b) then b.value - BigInt(2).pow(b.size) else b.value 
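// A minimal standalone sketch of the counter-based heap placement that nextMallocCount
// above introduces: each malloc site gets a fresh name and a synthetic base address, and
// the counter advances by the allocation size in 8-byte words plus one so regions never
// overlap. `HeapSlot` and `allocate` are illustrative names, not part of this codebase.
object HeapCounterSketch {
  final case class HeapSlot(name: String, start: BigInt, size: BigInt)
  private var counter: BigInt = 0

  def allocate(size: BigInt): HeapSlot = {
    val start = counter
    counter += (size.toDouble / 8).ceil.toInt + 1
    HeapSlot(s"malloc_$counter", start, size)
  }
}
// e.g. allocate(16) is placed at base 0 and a following allocate(8) at base 3.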
- val newHeapRegion = HeapRegion(nextMallocCount(), negB, IRWalk.procedure(n)) + val (name, start) = nextMallocCount(negB) + val newHeapRegion = HeapRegion(name, start, negB, IRWalk.procedure(n)) addReturnHeap(directCall, newHeapRegion) s case None => s diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala index 04394f162..05b024c11 100644 --- a/src/main/scala/analysis/RegionInjector.scala +++ b/src/main/scala/analysis/RegionInjector.scala @@ -74,14 +74,15 @@ class RegionInjector(domain: mutable.Set[CFGPosition], case _ => } } else if (regions.size > 1) { - //throw RuntimeException("Multiple regions found for memory") -// mmm.mergeRegions(regions) match { -// case stackRegion: StackRegion => -// return StackMemory(stackRegion.regionIdentifier, mem.addressSize, mem.valueSize) -// case dataRegion: DataRegion => -// return SharedMemory(dataRegion.regionIdentifier, mem.addressSize, mem.valueSize) -// case _ => -// } + val hasToBeDefined = mmm.getMergedName(regions) + Logger.debug(s"Multiple regions found for cmd ${cmd} regions size is ${regions.size}") + regions.head match { + case stackRegion: StackRegion => + return StackMemory(hasToBeDefined, mem.addressSize, mem.valueSize) + case dataRegion: DataRegion => + return SharedMemory(hasToBeDefined, mem.addressSize, mem.valueSize) + case _ => + } } else { Logger.debug(s"No region found for cmd ${cmd} regions size is ${regions.size}") } From 277a3fc0185466e8f627f5b0363d41b16e4f4918 Mon Sep 17 00:00:00 2001 From: l-kent Date: Tue, 22 Oct 2024 12:56:27 +1000 Subject: [PATCH 085/104] use visitor to resolve specification variables --- src/main/scala/boogie/BExpr.scala | 182 ++----------- src/main/scala/boogie/BVisitor.scala | 247 ++++++++++++++++++ .../scala/specification/Specification.scala | 123 +-------- src/main/scala/translating/IRToBoogie.scala | 32 +-- 4 files changed, 288 insertions(+), 296 deletions(-) create mode 100644 src/main/scala/boogie/BVisitor.scala diff --git a/src/main/scala/boogie/BExpr.scala b/src/main/scala/boogie/BExpr.scala index 2c5b5c1d8..c6249bbd8 100644 --- a/src/main/scala/boogie/BExpr.scala +++ b/src/main/scala/boogie/BExpr.scala @@ -12,20 +12,12 @@ trait BExpr { def globals: Set[BVar] = Set() def specGlobals: Set[SpecGlobalOrAccess] = Set() def oldSpecGlobals: Set[SpecGlobalOrAccess] = Set() - def resolveSpec: BExpr = this - def resolveOld: BExpr = this - def removeOld: BExpr = this - def resolveSpecL: BExpr = this - def resolveInsideOld: BExpr = this - def resolveSpecParam: BExpr = this - def resolveSpecParamOld: BExpr = this - def resolveSpecInv: BExpr = this - def resolveSpecInvOld: BExpr = this def loads: Set[BExpr] = Set() def serialiseBoogie(w: Writer): Unit = w.append(toString) + def acceptVisit(visitor: BVisitor): BExpr = this } -trait BLiteral extends BExpr {} +trait BLiteral extends BExpr sealed trait BoolBLiteral extends BLiteral @@ -39,7 +31,6 @@ case object StarBLiteral extends BoolBLiteral { override def toString: String = "*" } - case object FalseBLiteral extends BoolBLiteral { override val getType: BType = BoolBType override def toString: String = "false" @@ -53,10 +44,13 @@ case class BitVecBLiteral(value: BigInt, size: Int) extends BLiteral { case class IntBLiteral(value: BigInt) extends BLiteral { override val getType: BType = IntBType override def toString: String = value.toString + /* override def resolveSpecL: BitVecBLiteral = BitVecBLiteral(value, 32) // TODO override def resolveSpec: BitVecBLiteral = BitVecBLiteral(value, 32) // TODO override def 
resolveOld: BitVecBLiteral = BitVecBLiteral(value, 32) // TODO override def removeOld: BitVecBLiteral = BitVecBLiteral(value, 32) // TODO + */ + override def acceptVisit(visitor: BVisitor): BExpr = visitor.visitIntBLiteral(this) } case class BVExtract(end: Int, start: Int, body: BExpr) extends BExpr { @@ -67,15 +61,6 @@ case class BVExtract(end: Int, start: Int, body: BExpr) extends BExpr { override def globals: Set[BVar] = body.globals override def specGlobals: Set[SpecGlobalOrAccess] = body.specGlobals override def oldSpecGlobals: Set[SpecGlobalOrAccess] = body.oldSpecGlobals - override def resolveSpec: BVExtract = copy(body = body.resolveSpec) - override def resolveSpecInv: BVExtract = copy(body = body.resolveSpecInv) - override def resolveSpecInvOld: BVExtract = copy(body = body.resolveSpecInvOld) - override def resolveSpecParam: BVExtract = copy(body = body.resolveSpecParam) - override def resolveSpecParamOld: BVExtract = copy(body = body.resolveSpecParamOld) - override def resolveSpecL: BVExtract = copy(body = body.resolveSpecL) - override def resolveOld: BVExtract = copy(body = body.resolveOld) - override def resolveInsideOld: BVExtract = copy(body = body.resolveInsideOld) - override def removeOld: BVExtract = copy(body = body.removeOld) override def loads: Set[BExpr] = body.loads override def serialiseBoogie(w: Writer): Unit = { @@ -83,6 +68,8 @@ case class BVExtract(end: Int, start: Int, body: BExpr) extends BExpr { w.append(s"[$end:$start]") } + override def acceptVisit(visitor: BVisitor): BExpr = visitor.visitBVExtract(this) + } case class BVRepeat(repeats: Int, body: BExpr) extends BExpr { @@ -111,16 +98,8 @@ case class BVRepeat(repeats: Int, body: BExpr) extends BExpr { override def globals: Set[BVar] = body.globals override def specGlobals: Set[SpecGlobalOrAccess] = body.specGlobals override def oldSpecGlobals: Set[SpecGlobalOrAccess] = body.oldSpecGlobals - override def resolveSpec: BVRepeat = copy(body = body.resolveSpec) - override def resolveSpecInv: BVRepeat = copy(body = body.resolveSpecInv) - override def resolveSpecInvOld: BVRepeat = copy(body = body.resolveSpecInvOld) - override def resolveSpecParam: BVRepeat = copy(body = body.resolveSpecParam) - override def resolveSpecParamOld: BVRepeat = copy(body = body.resolveSpecParamOld) - override def resolveSpecL: BVRepeat = copy(body = body.resolveSpecL) - override def resolveOld: BVRepeat = copy(body = body.resolveOld) - override def resolveInsideOld: BVRepeat = copy(body = body.resolveInsideOld) - override def removeOld: BVRepeat = copy(body = body.removeOld) override def loads: Set[BExpr] = body.loads + override def acceptVisit(visitor: BVisitor): BExpr = visitor.visitBVRepeat(this) } case class BVZeroExtend(extension: Int, body: BExpr) extends BExpr { @@ -150,16 +129,9 @@ case class BVZeroExtend(extension: Int, body: BExpr) extends BExpr { override def globals: Set[BVar] = body.globals override def specGlobals: Set[SpecGlobalOrAccess] = body.specGlobals override def oldSpecGlobals: Set[SpecGlobalOrAccess] = body.oldSpecGlobals - override def resolveSpec: BVZeroExtend = copy(body = body.resolveSpec) - override def resolveSpecInv: BVZeroExtend = copy(body = body.resolveSpecInv) - override def resolveSpecInvOld: BVZeroExtend = copy(body = body.resolveSpecInvOld) - override def resolveSpecParam: BVZeroExtend = copy(body = body.resolveSpecParam) - override def resolveSpecParamOld: BVZeroExtend = copy(body = body.resolveSpecParamOld) - override def resolveSpecL: BVZeroExtend = copy(body = body.resolveSpecL) - override def 
resolveOld: BExpr = copy(body = body.resolveOld) - override def resolveInsideOld: BExpr = copy(body = body.resolveInsideOld) - override def removeOld: BExpr = copy(body = body.removeOld) override def loads: Set[BExpr] = body.loads + + override def acceptVisit(visitor: BVisitor): BExpr = visitor.visitBVZeroExtend(this) } case class BVSignExtend(extension: Int, body: BExpr) extends BExpr { @@ -181,7 +153,6 @@ case class BVSignExtend(extension: Int, body: BExpr) extends BExpr { w.append(")") } - override def functionOps: Set[FunctionOp] = { val thisFn = BVFunctionOp(fnName, s"sign_extend $extension", List(BParam(BitVecBType(bodySize))), BParam(getType)) body.functionOps + thisFn @@ -190,16 +161,8 @@ case class BVSignExtend(extension: Int, body: BExpr) extends BExpr { override def globals: Set[BVar] = body.globals override def specGlobals: Set[SpecGlobalOrAccess] = body.specGlobals override def oldSpecGlobals: Set[SpecGlobalOrAccess] = body.oldSpecGlobals - override def resolveSpecL: BVSignExtend = copy(body = body.resolveSpecL) - override def resolveSpec: BVSignExtend = copy(body = body.resolveSpec) - override def resolveSpecInv: BVSignExtend = copy(body = body.resolveSpecInv) - override def resolveSpecInvOld: BVSignExtend = copy(body = body.resolveSpecInvOld) - override def resolveSpecParam: BVSignExtend = copy(body = body.resolveSpecParam) - override def resolveSpecParamOld: BVSignExtend = copy(body = body.resolveSpecParamOld) - override def resolveOld: BExpr = copy(body = body.resolveOld) - override def resolveInsideOld: BExpr = copy(body = body.resolveInsideOld) - override def removeOld: BExpr = copy(body = body.removeOld) override def loads: Set[BExpr] = body.loads + override def acceptVisit(visitor: BVisitor): BExpr = visitor.visitBVSignExtend(this) } abstract class BVar(val name: String, val bType: BType, val scope: Scope) extends BExpr with Ordered[BVar] { @@ -257,15 +220,8 @@ case class BFunctionCall(name: String, args: List[BExpr], outType: BType, uninte override def globals: Set[BVar] = args.flatMap(a => a.globals).toSet override def specGlobals: Set[SpecGlobalOrAccess] = args.flatMap(a => a.specGlobals).toSet override def oldSpecGlobals: Set[SpecGlobalOrAccess] = args.flatMap(a => a.oldSpecGlobals).toSet - override def resolveSpec: BFunctionCall = copy(args = args.map(a => a.resolveSpec)) - override def resolveSpecInv: BFunctionCall = copy(args = args.map(a => a.resolveSpecInv)) - override def resolveSpecInvOld: BFunctionCall = copy(args = args.map(a => a.resolveSpecInvOld)) - override def resolveSpecParam: BFunctionCall = copy(args = args.map(a => a.resolveSpecParam)) - override def resolveSpecParamOld: BFunctionCall = copy(args = args.map(a => a.resolveSpecParamOld)) - override def resolveSpecL: BFunctionCall = copy(args = args.map(a => a.resolveSpecL)) - override def resolveOld: BExpr = copy(args = args.map(a => a.resolveOld)) - override def removeOld: BExpr = copy(args = args.map(a => a.removeOld)) override def loads: Set[BExpr] = args.flatMap(a => a.loads).toSet + override def acceptVisit(visitor: BVisitor): BExpr = visitor.visitBFunctionCall(this) } case class UnaryBExpr(op: UnOp, arg: BExpr) extends BExpr { @@ -300,43 +256,9 @@ case class UnaryBExpr(op: UnOp, arg: BExpr) extends BExpr { override def globals: Set[BVar] = arg.globals override def specGlobals: Set[SpecGlobalOrAccess] = arg.specGlobals override def oldSpecGlobals: Set[SpecGlobalOrAccess] = arg.oldSpecGlobals - override def resolveSpec: UnaryBExpr = op match { - case i: IntUnOp => copy(op = i.toBV, arg = 
arg.resolveSpec) - case _ => copy(arg = arg.resolveSpec) - } - override def resolveSpecInv: UnaryBExpr = op match { - case i: IntUnOp => copy(op = i.toBV, arg = arg.resolveSpecInv) - case _ => copy(arg = arg.resolveSpecInv) - } - override def resolveSpecInvOld: UnaryBExpr = op match { - case i: IntUnOp => copy(op = i.toBV, arg = arg.resolveSpecInvOld) - case _ => copy(arg = arg.resolveSpecInvOld) - } - override def resolveSpecParam: UnaryBExpr = op match { - case i: IntUnOp => copy(op = i.toBV, arg = arg.resolveSpecParam) - case _ => copy(arg = arg.resolveSpecParam) - } - override def resolveSpecParamOld: UnaryBExpr = op match { - case i: IntUnOp => copy(op = i.toBV, arg = arg.resolveSpecParamOld) - case _ => copy(arg = arg.resolveSpecParamOld) - } - override def resolveSpecL: UnaryBExpr = op match { - case i: IntUnOp => copy(op = i.toBV, arg = arg.resolveSpecL) - case _ => copy(arg = arg.resolveSpecL) - } - override def resolveOld: BExpr = op match { - case i: IntUnOp => copy(op = i.toBV, arg = arg.resolveOld) - case _ => copy(arg = arg.resolveOld) - } - override def resolveInsideOld: BExpr = op match { - case i: IntUnOp => copy(op = i.toBV, arg = arg.resolveInsideOld) - case _ => copy(arg = arg.resolveInsideOld) - } - override def removeOld: BExpr = op match { - case i: IntUnOp => copy(op = i.toBV, arg = arg.removeOld) - case _ => copy(arg = arg.removeOld) - } override def loads: Set[BExpr] = arg.loads + + override def acceptVisit(visitor: BVisitor): BExpr = visitor.visitUnaryBExpr(this) } case class BinaryBExpr(op: BinOp, arg1: BExpr, arg2: BExpr) extends BExpr { @@ -420,8 +342,6 @@ case class BinaryBExpr(op: BinOp, arg1: BExpr, arg2: BExpr) extends BExpr { case bOp: IntBinOp => s"($arg1 $bOp $arg2)" } - - override def functionOps: Set[FunctionOp] = { val thisFn = op match { case b: BVBinOp => @@ -441,52 +361,9 @@ case class BinaryBExpr(op: BinOp, arg1: BExpr, arg2: BExpr) extends BExpr { override def globals: Set[BVar] = arg1.globals ++ arg2.globals override def specGlobals: Set[SpecGlobalOrAccess] = arg1.specGlobals ++ arg2.specGlobals override def oldSpecGlobals: Set[SpecGlobalOrAccess] = arg1.oldSpecGlobals ++ arg2.oldSpecGlobals - - override def resolveSpec: BinaryBExpr = op match { - case i: IntBinOp => copy(op = i.toBV, arg1 = arg1.resolveSpec, arg2 = arg2.resolveSpec) - case _ => copy(arg1 = arg1.resolveSpec, arg2 = arg2.resolveSpec) - } - - override def resolveSpecInv: BinaryBExpr = op match { - case i: IntBinOp => copy(op = i.toBV, arg1 = arg1.resolveSpecInv, arg2 = arg2.resolveSpecInv) - case _ => copy(arg1 = arg1.resolveSpecInv, arg2 = arg2.resolveSpecInv) - } - - override def resolveSpecInvOld: BinaryBExpr = op match { - case i: IntBinOp => copy(op = i.toBV, arg1 = arg1.resolveSpecInvOld, arg2 = arg2.resolveSpecInvOld) - case _ => copy(arg1 = arg1.resolveSpecInvOld, arg2 = arg2.resolveSpecInvOld) - } - - override def resolveSpecParamOld: BinaryBExpr = op match { - case i: IntBinOp => copy(op = i.toBV, arg1 = arg1.resolveSpec, arg2 = arg2.resolveSpecParamOld) - case _ => copy(arg1 = arg1.resolveSpecParamOld, arg2 = arg2.resolveSpecParamOld) - } - - override def resolveSpecParam: BinaryBExpr = op match { - case i: IntBinOp => copy(op = i.toBV, arg1 = arg1.resolveSpecParam, arg2 = arg2.resolveSpecParam) - case _ => copy(arg1 = arg1.resolveSpecParam, arg2 = arg2.resolveSpecParam) - } - - override def resolveSpecL: BinaryBExpr = op match { - case i: IntBinOp => copy(op = i.toBV, arg1 = arg1.resolveSpecL, arg2 = arg2.resolveSpecL) - case _ => copy(arg1 = arg1.resolveSpecL, arg2 = 
arg2.resolveSpecL) - } - - override def resolveOld: BinaryBExpr = op match { - case i: IntBinOp => copy(op = i.toBV, arg1 = arg1.resolveOld, arg2 = arg2.resolveOld) - case _ => copy(arg1 = arg1.resolveOld, arg2 = arg2.resolveOld) - } - - override def resolveInsideOld: BinaryBExpr = op match { - case i: IntBinOp => copy(op = i.toBV, arg1 = arg1.resolveInsideOld, arg2 = arg2.resolveInsideOld) - case _ => copy(arg1 = arg1.resolveInsideOld, arg2 = arg2.resolveInsideOld) - } - - override def removeOld: BinaryBExpr = op match { - case i: IntBinOp => copy(op = i.toBV, arg1 = arg1.removeOld, arg2 = arg2.removeOld) - case _ => copy(arg1 = arg1.removeOld, arg2 = arg2.removeOld) - } override def loads: Set[BExpr] = arg1.loads ++ arg2.loads + + override def acceptVisit(visitor: BVisitor): BExpr = visitor.visitBinaryBExpr(this) } case class IfThenElse(guard: BExpr, thenExpr: BExpr, elseExpr: BExpr) extends BExpr { @@ -505,25 +382,9 @@ case class IfThenElse(guard: BExpr, thenExpr: BExpr, elseExpr: BExpr) extends BE override def specGlobals: Set[SpecGlobalOrAccess] = guard.specGlobals ++ thenExpr.specGlobals ++ elseExpr.specGlobals override def oldSpecGlobals: Set[SpecGlobalOrAccess] = guard.oldSpecGlobals ++ thenExpr.oldSpecGlobals ++ elseExpr.oldSpecGlobals - override def resolveSpec: IfThenElse = - copy(guard = guard.resolveSpec, thenExpr = thenExpr.resolveSpec, elseExpr = elseExpr.resolveSpec) - override def resolveSpecInv: IfThenElse = - copy(guard = guard.resolveSpecInv, thenExpr = thenExpr.resolveSpecInv, elseExpr = elseExpr.resolveSpecInv) - override def resolveSpecInvOld: IfThenElse = - copy(guard = guard.resolveSpecInvOld, thenExpr = thenExpr.resolveSpecInvOld, elseExpr = elseExpr.resolveSpecInvOld) - override def resolveSpecParam: IfThenElse = - copy(guard = guard.resolveSpecParam, thenExpr = thenExpr.resolveSpecParam, elseExpr = elseExpr.resolveSpecParam) - override def resolveSpecParamOld: IfThenElse = - copy(guard = guard.resolveSpecParamOld, thenExpr = thenExpr.resolveSpecParamOld, elseExpr = elseExpr.resolveSpecParamOld) - override def resolveSpecL: IfThenElse = - copy(guard = guard.resolveSpecL, thenExpr = thenExpr.resolveSpecL, elseExpr = elseExpr.resolveSpecL) - override def resolveOld: IfThenElse = - copy(guard = guard.resolveOld, thenExpr = thenExpr.resolveOld, elseExpr = elseExpr.resolveOld) - override def resolveInsideOld: IfThenElse = - copy(guard = guard.resolveInsideOld, thenExpr = thenExpr.resolveInsideOld, elseExpr = elseExpr.resolveInsideOld) - override def removeOld: IfThenElse = - copy(guard = guard.removeOld, thenExpr = thenExpr.removeOld, elseExpr = elseExpr.removeOld) override def loads: Set[BExpr] = guard.loads ++ thenExpr.loads ++ elseExpr.loads + + override def acceptVisit(visitor: BVisitor): BExpr = visitor.visitIfThenElse(this) } trait QuantifierExpr(sort: Quantifier, bound: List[BVar], body: BExpr) extends BExpr { @@ -559,13 +420,8 @@ case class Old(body: BExpr) extends BExpr { override def locals: Set[BVar] = body.locals override def globals: Set[BVar] = body.globals override def oldSpecGlobals: Set[SpecGlobalOrAccess] = body.specGlobals - override def resolveSpecParam: BExpr = body.resolveSpecParamOld - override def resolveSpecInv: BExpr = body.resolveSpecInvOld - override def resolveSpec: BExpr = copy(body = body.resolveSpec) - override def resolveSpecL: BExpr = copy(body = body.resolveSpecL) - override def resolveOld: BExpr = body.resolveInsideOld - override def removeOld: BExpr = body.resolveSpec override def loads: Set[BExpr] = body.loads + override def 
acceptVisit(visitor: BVisitor): BExpr = visitor.visitOld(this) } case class MapAccess(mapVar: BMapVar, index: BExpr) extends BExpr { diff --git a/src/main/scala/boogie/BVisitor.scala b/src/main/scala/boogie/BVisitor.scala new file mode 100644 index 000000000..67c9e9f79 --- /dev/null +++ b/src/main/scala/boogie/BVisitor.scala @@ -0,0 +1,247 @@ +package boogie + +import ir.{Endian, IntBinOp, IntUnOp} +import specification.{ArrayAccess, SpecGamma, SpecGlobal} + +trait BVisitor { + def visitBExpr(node: BExpr): BExpr = node.acceptVisit(this) + + def visitIntBLiteral(node: IntBLiteral): BExpr = node + + def visitBVExtract(node: BVExtract): BExpr = node.copy(body = visitBExpr(node.body)) + + def visitBVRepeat(node: BVRepeat): BExpr = node.copy(body = visitBExpr(node.body)) + + def visitBVZeroExtend(node: BVZeroExtend): BExpr = node.copy(body = visitBExpr(node.body)) + + def visitBVSignExtend(node: BVSignExtend): BExpr = node.copy(body = visitBExpr(node.body)) + + def visitBFunctionCall(node: BFunctionCall): BExpr = node.copy(args = node.args.map(visitBExpr)) + + def visitUnaryBExpr(node: UnaryBExpr): BExpr = node.copy(arg = visitBExpr(node.arg)) + + def visitBinaryBExpr(node: BinaryBExpr): BExpr = { + node.copy(arg1 = visitBExpr(node.arg1), arg2 = visitBExpr(node.arg2)) + } + + def visitIfThenElse(node: IfThenElse): BExpr = { + node.copy(guard = visitBExpr(node.guard), thenExpr = visitBExpr(node.thenExpr), elseExpr = visitBExpr(node.elseExpr)) + } + + def visitOld(node: Old): BExpr = node.copy(body = visitBExpr(node.body)) + + def visitSpecGlobal(node: SpecGlobal): BExpr = node + + def visitSpecGamma(node: SpecGamma): BExpr = node + + def visitArrayAccess(node: ArrayAccess): BExpr = node +} + +trait SpecResolutionVisitor extends BVisitor { + override def visitUnaryBExpr(node: UnaryBExpr): BExpr = { + node.op match { + case i: IntUnOp => node.copy(op = i.toBV, arg = visitBExpr(node.arg)) + case _ => node.copy(arg = visitBExpr(node.arg)) + } + } + + override def visitBinaryBExpr(node: BinaryBExpr): BExpr = { + node.op match { + case i: IntBinOp => node.copy(op = i.toBV, arg1 = visitBExpr(node.arg1), arg2 = visitBExpr(node.arg2)) + case _ => node.copy(arg1 = visitBExpr(node.arg1), arg2 = visitBExpr(node.arg2)) + } + } +} + +object ResolveSpec extends SpecResolutionVisitor { + override def visitSpecGlobal(node: SpecGlobal): BMemoryLoad = { + BMemoryLoad( + BMapVar("mem", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global), + node.toAddrVar, + Endian.LittleEndian, + node.size + ) + } + + override def visitSpecGamma(node: SpecGamma): GammaLoad = { + GammaLoad( + BMapVar("Gamma_mem", MapBType(BitVecBType(64), BoolBType), Scope.Global), + node.global.toAddrVar, + node.global.size, + node.global.size / 8 + ) + } + + override def visitArrayAccess(node: ArrayAccess): BMemoryLoad = { + BMemoryLoad( + BMapVar("mem", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global), + node.toAddrVar, + Endian.LittleEndian, + node.global.size + ) + } + +} + +object ResolveOld extends SpecResolutionVisitor { + override def visitOld(node: Old): BExpr = ResolveInsideOld.visitBExpr(node.body) + override def visitSpecGlobal(node: SpecGlobal): BMemoryLoad = ResolveSpec.visitSpecGlobal(node) + override def visitSpecGamma(node: SpecGamma): GammaLoad = ResolveSpec.visitSpecGamma(node) + override def visitArrayAccess(node: ArrayAccess): BMemoryLoad = ResolveSpec.visitArrayAccess(node) +} + +object RemoveOld extends SpecResolutionVisitor { + override def visitOld(node: Old): BExpr = ResolveSpec.visitBExpr(node.body) + 
override def visitSpecGlobal(node: SpecGlobal): BMemoryLoad = ResolveSpec.visitSpecGlobal(node) + override def visitSpecGamma(node: SpecGamma): GammaLoad = ResolveSpec.visitSpecGamma(node) + override def visitArrayAccess(node: ArrayAccess): BMemoryLoad = ResolveSpec.visitArrayAccess(node) +} + +object ResolveSpecL extends SpecResolutionVisitor { + override def visitSpecGlobal(node: SpecGlobal): BMemoryLoad = { + BMemoryLoad( + BMapVar("memory", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter), + node.toAddrVar, + Endian.LittleEndian, + node.size + ) + } + + override def visitSpecGamma(node: SpecGamma): GammaLoad = ResolveSpec.visitSpecGamma(node) + + override def visitArrayAccess(node: ArrayAccess): BMemoryLoad = { + BMemoryLoad( + BMapVar("memory", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter), + node.toAddrVar, + Endian.LittleEndian, + node.global.size + ) + } +} + +object ResolveInsideOld extends SpecResolutionVisitor { + override def visitSpecGlobal(node: SpecGlobal): BExpr = node.toOldVar + override def visitSpecGamma(node: SpecGamma): BExpr = node.global.toOldGamma + override def visitArrayAccess(node: ArrayAccess): BExpr = node.toOldVar +} + +object ResolveSpecParam extends SpecResolutionVisitor { + override def visitOld(node: Old): BExpr = ResolveSpecParamOld.visitBExpr(node.body) + + override def visitSpecGlobal(node: SpecGlobal): BMemoryLoad = { + BMemoryLoad( + BMapVar("mem$out", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter), + node.toAddrVar, + Endian.LittleEndian, + node.size + ) + } + + override def visitSpecGamma(node: SpecGamma): GammaLoad = { + GammaLoad( + BMapVar("Gamma_mem$out", MapBType(BitVecBType(64), BoolBType), Scope.Parameter), + node.global.toAddrVar, + node.global.size, + node.global.size / 8 + ) + } + + override def visitArrayAccess(node: ArrayAccess): BMemoryLoad = { + BMemoryLoad( + BMapVar("mem$out", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter), + node.toAddrVar, + Endian.LittleEndian, + node.global.size + ) + } +} + +object ResolveSpecParamOld extends SpecResolutionVisitor { + override def visitSpecGlobal(node: SpecGlobal): BMemoryLoad = { + BMemoryLoad( + BMapVar("mem$in", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter), + node.toAddrVar, + Endian.LittleEndian, + node.size + ) + } + + override def visitSpecGamma(node: SpecGamma): GammaLoad = { + GammaLoad( + BMapVar("Gamma_mem$in", MapBType(BitVecBType(64), BoolBType), Scope.Parameter), + node.global.toAddrVar, + node.global.size, + node.global.size / 8 + ) + } + + override def visitArrayAccess(node: ArrayAccess): BMemoryLoad = { + BMemoryLoad( + BMapVar("mem$in", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter), + node.toAddrVar, + Endian.LittleEndian, + node.global.size + ) + } +} + +object ResolveSpecInv extends SpecResolutionVisitor { + override def visitOld(node: Old): BExpr = ResolveSpecInvOld.visitBExpr(node.body) + + override def visitSpecGlobal(node: SpecGlobal): BMemoryLoad = { + BMemoryLoad( + BMapVar("mem$inv2", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Local), + node.toAddrVar, + Endian.LittleEndian, + node.size + ) + } + + override def visitSpecGamma(node: SpecGamma): GammaLoad = { + GammaLoad( + BMapVar("Gamma_mem$inv2", MapBType(BitVecBType(64), BoolBType), Scope.Local), + node.global.toAddrVar, + node.global.size, + node.global.size / 8 + ) + } + + override def visitArrayAccess(node: ArrayAccess): BMemoryLoad = { + BMemoryLoad( + BMapVar("mem$inv2", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Local), + 
node.toAddrVar, + Endian.LittleEndian, + node.global.size + ) + } +} + +object ResolveSpecInvOld extends SpecResolutionVisitor { + override def visitSpecGlobal(node: SpecGlobal): BMemoryLoad = { + BMemoryLoad( + BMapVar("mem$inv1", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Local), + node.toAddrVar, + Endian.LittleEndian, + node.size + ) + } + + override def visitSpecGamma(node: SpecGamma): GammaLoad = { + GammaLoad( + BMapVar("Gamma_mem$inv1", MapBType(BitVecBType(64), BoolBType), Scope.Local), + node.global.toAddrVar, + node.global.size, + node.global.size / 8 + ) + } + + override def visitArrayAccess(node: ArrayAccess): BMemoryLoad = { + BMemoryLoad( + BMapVar("mem$inv1", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Local), + node.toAddrVar, + Endian.LittleEndian, + node.global.size + ) + } + +} \ No newline at end of file diff --git a/src/main/scala/specification/Specification.scala b/src/main/scala/specification/Specification.scala index 6ae8d8cfe..a5c243f26 100644 --- a/src/main/scala/specification/Specification.scala +++ b/src/main/scala/specification/Specification.scala @@ -1,7 +1,7 @@ package specification -import boogie._ -import ir._ +import boogie.* +import ir.* import util.Logger trait SpecVar extends BExpr { @@ -24,83 +24,11 @@ case class SpecGlobal(name: String, override val size: Int, arraySize: Option[In override val toOldVar: BVar = BVariable(s"${name}_old", BitVecBType(size), Scope.Local) override val toOldGamma: BVar = BVariable(s"Gamma_${name}_old", BoolBType, Scope.Local) val toAxiom: BAxiom = BAxiom(BinaryBExpr(BoolEQ, toAddrVar, BitVecBLiteral(address, 64)), List.empty) - override def resolveSpec: BMemoryLoad = BMemoryLoad( - BMapVar("mem", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global), - toAddrVar, - Endian.LittleEndian, - size - ) - override def resolveSpecParam: BMemoryLoad = BMemoryLoad( - BMapVar("mem$out", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter), - toAddrVar, - Endian.LittleEndian, - size - ) - override def resolveSpecParamOld: BMemoryLoad = BMemoryLoad( - BMapVar("mem$in", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter), - toAddrVar, - Endian.LittleEndian, - size - ) - override def resolveSpecInv: BMemoryLoad = BMemoryLoad( - BMapVar("mem$inv2", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Local), - toAddrVar, - Endian.LittleEndian, - size - ) - override def resolveSpecInvOld: BMemoryLoad = BMemoryLoad( - BMapVar("mem$inv1", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Local), - toAddrVar, - Endian.LittleEndian, - size - ) - override def resolveOld: BMemoryLoad = resolveSpec - override def resolveInsideOld: BExpr = toOldVar - override def removeOld: BMemoryLoad = resolveSpec - override def resolveSpecL: BMemoryLoad = BMemoryLoad( - BMapVar("memory", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter), - toAddrVar, - Endian.LittleEndian, - size - ) + override def acceptVisit(visitor: BVisitor): BExpr = visitor.visitSpecGlobal(this) } case class SpecGamma(global: SpecGlobal) extends SpecVar { - // TODO don't hardcode this - override def resolveSpec: GammaLoad = GammaLoad( - BMapVar("Gamma_mem", MapBType(BitVecBType(64), BoolBType), Scope.Global), - global.toAddrVar, - global.size, - global.size / 8 - ) - override def resolveSpecParam: GammaLoad = GammaLoad( - BMapVar("Gamma_mem$out", MapBType(BitVecBType(64), BoolBType), Scope.Parameter), - global.toAddrVar, - global.size, - global.size / 8 - ) - override def resolveSpecParamOld: GammaLoad = GammaLoad( - BMapVar("Gamma_mem$in", 
MapBType(BitVecBType(64), BoolBType), Scope.Parameter), - global.toAddrVar, - global.size, - global.size / 8 - ) - override def resolveSpecInv: GammaLoad = GammaLoad( - BMapVar("Gamma_mem$inv2", MapBType(BitVecBType(64), BoolBType), Scope.Local), - global.toAddrVar, - global.size, - global.size / 8 - ) - override def resolveSpecInvOld: GammaLoad = GammaLoad( - BMapVar("Gamma_mem$inv1", MapBType(BitVecBType(64), BoolBType), Scope.Local), - global.toAddrVar, - global.size, - global.size / 8 - ) - override def resolveOld: GammaLoad = resolveSpec - override def resolveInsideOld: BExpr = global.toOldGamma - override def removeOld: GammaLoad = resolveSpec - override def resolveSpecL: GammaLoad = resolveSpec + override def acceptVisit(visitor: BVisitor): BExpr = visitor.visitSpecGamma(this) } case class ArrayAccess(global: SpecGlobal, index: Int) extends SpecGlobalOrAccess { @@ -110,46 +38,7 @@ case class ArrayAccess(global: SpecGlobal, index: Int) extends SpecGlobalOrAcces override val toAddrVar: BExpr = BinaryBExpr(BVADD, global.toAddrVar, accessIndex) override val toOldGamma: BVar = BVariable(s"Gamma_${global.name}$$${index}_old", BoolBType, Scope.Local) override def specGlobals: Set[SpecGlobalOrAccess] = Set(this) - override def resolveSpec: BMemoryLoad = BMemoryLoad( - BMapVar("mem", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global), - toAddrVar, - Endian.LittleEndian, - global.size - ) - override def resolveSpecParam: BMemoryLoad = BMemoryLoad( - BMapVar("mem$out", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter), - toAddrVar, - Endian.LittleEndian, - global.size - ) - override def resolveSpecParamOld: BMemoryLoad = BMemoryLoad( - BMapVar("mem$in", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter), - toAddrVar, - Endian.LittleEndian, - global.size - ) - - override def resolveSpecInv: BMemoryLoad = BMemoryLoad( - BMapVar("mem$inv2", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Local), - toAddrVar, - Endian.LittleEndian, - global.size - ) - override def resolveSpecInvOld: BMemoryLoad = BMemoryLoad( - BMapVar("mem$inv1", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Local), - toAddrVar, - Endian.LittleEndian, - global.size - ) - override def resolveOld: BMemoryLoad = resolveSpec - override def resolveInsideOld: BExpr = toOldVar - override def removeOld: BMemoryLoad = resolveSpec - override def resolveSpecL: BMemoryLoad = BMemoryLoad( - BMapVar("memory", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter), - toAddrVar, - Endian.LittleEndian, - global.size - ) + override def acceptVisit(visitor: BVisitor): BExpr = visitor.visitArrayAccess(this) } case class Specification( @@ -164,7 +53,7 @@ case class Specification( val controls: Map[SpecGlobalOrAccess, Set[SpecGlobal]] = { val controlledBy = LPreds.map((k, v) => k -> v.specGlobals).collect { case (k, v) if v.nonEmpty => (k, v) } - controlledBy.toSet.flatMap((k, v) => v.map(_ -> k)).groupMap(_._1)(_._2) + controlledBy.toSet.flatMap((k, v) => v.map(_ -> k)).groupMap(_(0))(_(1)) } val controlled: Set[SpecGlobal] = controls.values.flatten.toSet } diff --git a/src/main/scala/translating/IRToBoogie.scala b/src/main/scala/translating/IRToBoogie.scala index 26a6ac8f8..d6d041446 100644 --- a/src/main/scala/translating/IRToBoogie.scala +++ b/src/main/scala/translating/IRToBoogie.scala @@ -13,17 +13,17 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti private val globals = spec.globals private val controls = spec.controls private val controlled = spec.controlled - private val 
relies = spec.relies.map(r => r.resolveSpec) - private val reliesParam = spec.relies.map(r => r.resolveSpecParam) - private val reliesReflexive = spec.relies.map(r => r.removeOld) - private val guarantees = spec.guarantees.map(g => g.resolveOld) - private val guaranteesParam = spec.guarantees.map(g => g.resolveSpecParam) - private val guaranteesReflexive = spec.guarantees.map(g => g.removeOld) + private val relies = spec.relies.map(ResolveSpec.visitBExpr) + private val reliesParam = spec.relies.map(ResolveSpecParam.visitBExpr) + private val reliesReflexive = spec.relies.map(RemoveOld.visitBExpr) + private val guarantees = spec.guarantees.map(ResolveOld.visitBExpr) + private val guaranteesParam = spec.guarantees.map(ResolveSpecParam.visitBExpr) + private val guaranteesReflexive = spec.guarantees.map(RemoveOld.visitBExpr) private val guaranteeOldVars = spec.guaranteeOldVars - private val LPreds = spec.LPreds.map((k, v) => k -> v.resolveSpecL) - private val requires = spec.subroutines.map(s => s.name -> s.requires.map(e => e.resolveSpec)).toMap + private val LPreds = spec.LPreds.map((k, v) => k -> ResolveSpecL.visitBExpr(v)) + private val requires = spec.subroutines.map(s => s.name -> s.requires.map(ResolveSpec.visitBExpr)).toMap private val requiresDirect = spec.subroutines.map(s => s.name -> s.requiresDirect).toMap - private val ensures = spec.subroutines.map(s => s.name -> s.ensures.map(e => e.resolveSpec)).toMap + private val ensures = spec.subroutines.map(s => s.name -> s.ensures.map(ResolveSpec.visitBExpr)).toMap private val ensuresDirect = spec.subroutines.map(s => s.name -> s.ensuresDirect).toMap private val libRelies = spec.subroutines.map(s => s.name -> s.rely).toMap private val libGuarantees = spec.subroutines.map(s => s.name -> s.guarantee).toMap @@ -172,7 +172,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti relyEnsures.head } - val guaranteeEnsures = libGuarantees(name).map(g => g.resolveSpecParam) + val guaranteeEnsures = libGuarantees(name).map(ResolveSpecParam.visitBExpr) val guaranteeOneLine = if (guaranteeEnsures.size > 1) { guaranteeEnsures.tail.foldLeft(guaranteeEnsures.head)((ands: BExpr, next: BExpr) => BinaryBExpr(BoolAND, ands, next)) } else { @@ -194,7 +194,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti def genLibGuarantee(name: String): BProcedure = { // G_f - val guaranteeLib = libGuarantees(name).map(g => g.resolveSpecParam) + val guaranteeLib = libGuarantees(name).map(ResolveSpecParam.visitBExpr) val guaranteeOneLine = if (guaranteeLib.size > 1) { guaranteeLib.tail.foldLeft(guaranteeLib.head)((ands: BExpr, next: BExpr) => BinaryBExpr(BoolAND, ands, next)) } else { @@ -613,11 +613,11 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti * */ (libRelies.keySet ++ libGuarantees.keySet).filter(x => libRelies(x).nonEmpty && libGuarantees(x).nonEmpty).map(targetName => { - val Rc: BExpr = spec.relies.reduce((a, b) => BinaryBExpr(BoolAND, a, b)).resolveSpec - val Gc: BExpr = spec.guarantees.reduce((a, b) => BinaryBExpr(BoolAND, a, b)).resolveSpec + val Rc: BExpr = ResolveSpec.visitBExpr(spec.relies.reduce((a, b) => BinaryBExpr(BoolAND, a, b))) + val Gc: BExpr = ResolveSpec.visitBExpr(spec.guarantees.reduce((a, b) => BinaryBExpr(BoolAND, a, b))) - val Rf: BExpr = libRelies(targetName).reduce((a, b) => BinaryBExpr(BoolAND, a, b)).resolveSpec - val Gf: BExpr = libGuarantees(targetName).reduce((a, b) => BinaryBExpr(BoolAND, a, b)).resolveSpec + val Rf: BExpr = 
ResolveSpec.visitBExpr(libRelies(targetName).reduce((a, b) => BinaryBExpr(BoolAND, a, b))) + val Gf: BExpr = ResolveSpec.visitBExpr(libGuarantees(targetName).reduce((a, b) => BinaryBExpr(BoolAND, a, b))) val inv = BinaryBExpr(BoolOR, Rc, Gf) val conseq = BinaryBExpr(BoolIMPLIES, Rc, Rf) @@ -685,7 +685,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti if (libRelies.contains(d.target.name) && libGuarantees.contains(d.target.name) && libRelies(d.target.name).nonEmpty && libGuarantees(d.target.name).nonEmpty) { val invCall1 = BProcedureCall(d.target.name + "$inv", List(mem_inv1, Gamma_mem_inv1), List(mem, Gamma_mem)) val invCall2 = BProcedureCall("rely$inv", List(mem_inv2, Gamma_mem_inv2), List(mem_inv1, Gamma_mem_inv1)) - val libRGAssert = libRelies(d.target.name).map(r => BAssert(r.resolveSpecInv)) + val libRGAssert = libRelies(d.target.name).map(r => BAssert(ResolveSpecInv.visitBExpr(r))) List(invCall1, invCall2) ++ libRGAssert } else { List() From 8612486ad5c72a7e6de4e2bf134cc8b070ff2f88 Mon Sep 17 00:00:00 2001 From: l-kent Date: Tue, 22 Oct 2024 13:06:45 +1000 Subject: [PATCH 086/104] fix mistake in SystemTests --- src/test/scala/SystemTests.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/test/scala/SystemTests.scala b/src/test/scala/SystemTests.scala index 6be01a68e..88d3779e2 100644 --- a/src/test/scala/SystemTests.scala +++ b/src/test/scala/SystemTests.scala @@ -176,8 +176,8 @@ trait SystemTests extends AnyFunSuite, BASILTest { } class SystemTestsBAP extends SystemTests { - runTests("correct", TestConfig(useBAPFrontend = true, expectVerify = true, logResults = true)) - runTests("incorrect", TestConfig(useBAPFrontend = true, expectVerify = false, logResults = true)) + runTests("correct", TestConfig(useBAPFrontend = true, expectVerify = true, checkExpected = true, logResults = true)) + runTests("incorrect", TestConfig(useBAPFrontend = true, expectVerify = false, checkExpected = true, logResults = true)) test("summary-BAP") { summary("testresult-BAP") } From b228b7f605ffbce1fedf69549f18b20607de6074 Mon Sep 17 00:00:00 2001 From: l-kent Date: Wed, 23 Oct 2024 09:21:03 +1000 Subject: [PATCH 087/104] add regions to boogie output for specifications and guarantees --- src/main/scala/analysis/RegionInjector.scala | 9 +++ src/main/scala/boogie/BExpr.scala | 7 -- src/main/scala/boogie/BVisitor.scala | 75 +++++++++++-------- .../scala/specification/Specification.scala | 4 +- src/main/scala/translating/IRToBoogie.scala | 32 ++++---- src/main/scala/util/RunUtils.scala | 8 +- 6 files changed, 77 insertions(+), 58 deletions(-) diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala index 0b31464ef..1cff0e941 100644 --- a/src/main/scala/analysis/RegionInjector.scala +++ b/src/main/scala/analysis/RegionInjector.scala @@ -360,6 +360,15 @@ class RegionInjector(program: Program, } } } + + def getMergedRegion(address: BigInt): Option[MergedRegion] = { + val region = mmm.findDataObject(address) + if (region.isDefined && mergedRegions.contains(region.get)) { + Some(mergedRegions(region.get)) + } else { + None + } + } } class RegionRenamer(memory: Memory) extends Visitor { diff --git a/src/main/scala/boogie/BExpr.scala b/src/main/scala/boogie/BExpr.scala index c6249bbd8..5957cb0e2 100644 --- a/src/main/scala/boogie/BExpr.scala +++ b/src/main/scala/boogie/BExpr.scala @@ -44,13 +44,6 @@ case class BitVecBLiteral(value: BigInt, size: Int) extends BLiteral { case class IntBLiteral(value: BigInt) 
extends BLiteral { override val getType: BType = IntBType override def toString: String = value.toString - /* - override def resolveSpecL: BitVecBLiteral = BitVecBLiteral(value, 32) // TODO - override def resolveSpec: BitVecBLiteral = BitVecBLiteral(value, 32) // TODO - override def resolveOld: BitVecBLiteral = BitVecBLiteral(value, 32) // TODO - override def removeOld: BitVecBLiteral = BitVecBLiteral(value, 32) // TODO - */ - override def acceptVisit(visitor: BVisitor): BExpr = visitor.visitIntBLiteral(this) } case class BVExtract(end: Int, start: Int, body: BExpr) extends BExpr { diff --git a/src/main/scala/boogie/BVisitor.scala b/src/main/scala/boogie/BVisitor.scala index 67c9e9f79..aa17df0e0 100644 --- a/src/main/scala/boogie/BVisitor.scala +++ b/src/main/scala/boogie/BVisitor.scala @@ -1,13 +1,12 @@ package boogie +import analysis.RegionInjector import ir.{Endian, IntBinOp, IntUnOp} import specification.{ArrayAccess, SpecGamma, SpecGlobal} trait BVisitor { def visitBExpr(node: BExpr): BExpr = node.acceptVisit(this) - def visitIntBLiteral(node: IntBLiteral): BExpr = node - def visitBVExtract(node: BVExtract): BExpr = node.copy(body = visitBExpr(node.body)) def visitBVRepeat(node: BVRepeat): BExpr = node.copy(body = visitBExpr(node.body)) @@ -53,51 +52,63 @@ trait SpecResolutionVisitor extends BVisitor { } } -object ResolveSpec extends SpecResolutionVisitor { +class ResolveSpec(regionInjector: Option[RegionInjector]) extends SpecResolutionVisitor { + private val mem = BMapVar("mem", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global) + private val gammaMem = BMapVar("Gamma_mem", MapBType(BitVecBType(64), BoolBType), Scope.Global) + override def visitSpecGlobal(node: SpecGlobal): BMemoryLoad = { - BMemoryLoad( - BMapVar("mem", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global), - node.toAddrVar, - Endian.LittleEndian, - node.size - ) + val memory = if (regionInjector.isDefined) { + regionInjector.get.getMergedRegion(node.address) match { + case Some(region) => BMapVar(region.name, MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global) + case None => mem + } + } else { + mem + } + BMemoryLoad(memory, node.toAddrVar, Endian.LittleEndian, node.size) } override def visitSpecGamma(node: SpecGamma): GammaLoad = { - GammaLoad( - BMapVar("Gamma_mem", MapBType(BitVecBType(64), BoolBType), Scope.Global), - node.global.toAddrVar, - node.global.size, - node.global.size / 8 - ) + val gammaMemory = if (regionInjector.isDefined) { + regionInjector.get.getMergedRegion(node.global.address) match { + case Some(region) => BMapVar(s"Gamma_${region.name}", MapBType(BitVecBType(64), BoolBType), Scope.Global) + case None => gammaMem + } + } else { + gammaMem + } + GammaLoad(gammaMemory, node.global.toAddrVar, node.global.size, node.global.size / 8) } override def visitArrayAccess(node: ArrayAccess): BMemoryLoad = { - BMemoryLoad( - BMapVar("mem", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global), - node.toAddrVar, - Endian.LittleEndian, - node.global.size - ) + val memory = if (regionInjector.isDefined) { + regionInjector.get.getMergedRegion(node.global.address + node.offset) match { + case Some(region) => BMapVar(region.name, MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global) + case None => mem + } + } else { + mem + } + BMemoryLoad(memory, node.toAddrVar, Endian.LittleEndian, node.size) } } -object ResolveOld extends SpecResolutionVisitor { +class ResolveOld(resolveSpec: ResolveSpec) extends SpecResolutionVisitor { override def visitOld(node: Old): BExpr = 
ResolveInsideOld.visitBExpr(node.body) - override def visitSpecGlobal(node: SpecGlobal): BMemoryLoad = ResolveSpec.visitSpecGlobal(node) - override def visitSpecGamma(node: SpecGamma): GammaLoad = ResolveSpec.visitSpecGamma(node) - override def visitArrayAccess(node: ArrayAccess): BMemoryLoad = ResolveSpec.visitArrayAccess(node) + override def visitSpecGlobal(node: SpecGlobal): BMemoryLoad = resolveSpec.visitSpecGlobal(node) + override def visitSpecGamma(node: SpecGamma): GammaLoad = resolveSpec.visitSpecGamma(node) + override def visitArrayAccess(node: ArrayAccess): BMemoryLoad = resolveSpec.visitArrayAccess(node) } -object RemoveOld extends SpecResolutionVisitor { - override def visitOld(node: Old): BExpr = ResolveSpec.visitBExpr(node.body) - override def visitSpecGlobal(node: SpecGlobal): BMemoryLoad = ResolveSpec.visitSpecGlobal(node) - override def visitSpecGamma(node: SpecGamma): GammaLoad = ResolveSpec.visitSpecGamma(node) - override def visitArrayAccess(node: ArrayAccess): BMemoryLoad = ResolveSpec.visitArrayAccess(node) +class RemoveOld(resolveSpec: ResolveSpec) extends SpecResolutionVisitor { + override def visitOld(node: Old): BExpr = resolveSpec.visitBExpr(node.body) + override def visitSpecGlobal(node: SpecGlobal): BMemoryLoad = resolveSpec.visitSpecGlobal(node) + override def visitSpecGamma(node: SpecGamma): GammaLoad = resolveSpec.visitSpecGamma(node) + override def visitArrayAccess(node: ArrayAccess): BMemoryLoad = resolveSpec.visitArrayAccess(node) } -object ResolveSpecL extends SpecResolutionVisitor { +class ResolveSpecL(resolveSpec: ResolveSpec) extends SpecResolutionVisitor { override def visitSpecGlobal(node: SpecGlobal): BMemoryLoad = { BMemoryLoad( BMapVar("memory", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter), @@ -107,7 +118,7 @@ object ResolveSpecL extends SpecResolutionVisitor { ) } - override def visitSpecGamma(node: SpecGamma): GammaLoad = ResolveSpec.visitSpecGamma(node) + override def visitSpecGamma(node: SpecGamma): GammaLoad = resolveSpec.visitSpecGamma(node) override def visitArrayAccess(node: ArrayAccess): BMemoryLoad = { BMemoryLoad( diff --git a/src/main/scala/specification/Specification.scala b/src/main/scala/specification/Specification.scala index a5c243f26..1bb536167 100644 --- a/src/main/scala/specification/Specification.scala +++ b/src/main/scala/specification/Specification.scala @@ -33,9 +33,9 @@ case class SpecGamma(global: SpecGlobal) extends SpecVar { case class ArrayAccess(global: SpecGlobal, index: Int) extends SpecGlobalOrAccess { override val size: Int = global.size - private val accessIndex = BitVecBLiteral(index * (global.size / 8), 64) + val offset = index * (global.size / 8) override val toOldVar: BVar = BVariable(s"${global.name}$$${index}_old", BitVecBType(global.size), Scope.Local) - override val toAddrVar: BExpr = BinaryBExpr(BVADD, global.toAddrVar, accessIndex) + override val toAddrVar: BExpr = BinaryBExpr(BVADD, global.toAddrVar, BitVecBLiteral(offset, 64)) override val toOldGamma: BVar = BVariable(s"Gamma_${global.name}$$${index}_old", BoolBType, Scope.Local) override def specGlobals: Set[SpecGlobalOrAccess] = Set(this) override def acceptVisit(visitor: BVisitor): BExpr = visitor.visitArrayAccess(this) diff --git a/src/main/scala/translating/IRToBoogie.scala b/src/main/scala/translating/IRToBoogie.scala index d6d041446..e57b1195b 100644 --- a/src/main/scala/translating/IRToBoogie.scala +++ b/src/main/scala/translating/IRToBoogie.scala @@ -1,4 +1,5 @@ package translating +import analysis.RegionInjector import 
ir.{BoolOR, *} import boogie.* import specification.* @@ -7,23 +8,27 @@ import util.{BoogieGeneratorConfig, BoogieMemoryAccessMode, ProcRelyVersion} import scala.collection.mutable import scala.collection.mutable.ArrayBuffer -class IRToBoogie(var program: Program, var spec: Specification, var thread: Option[ProgramThread], val filename: String) { +class IRToBoogie(var program: Program, var spec: Specification, var thread: Option[ProgramThread], val filename: String, val regionInjector: Option[RegionInjector]) { private val externAttr = BAttribute("extern") private val inlineAttr = BAttribute("inline") private val globals = spec.globals private val controls = spec.controls private val controlled = spec.controlled - private val relies = spec.relies.map(ResolveSpec.visitBExpr) + private val resolveSpec = ResolveSpec(regionInjector) + private val resolveSpecL = ResolveSpecL(resolveSpec) + private val resolveOld = ResolveOld(resolveSpec) + private val removeOld = RemoveOld(resolveSpec) + private val relies = spec.relies.map(resolveSpec.visitBExpr) private val reliesParam = spec.relies.map(ResolveSpecParam.visitBExpr) - private val reliesReflexive = spec.relies.map(RemoveOld.visitBExpr) - private val guarantees = spec.guarantees.map(ResolveOld.visitBExpr) + private val reliesReflexive = spec.relies.map(removeOld.visitBExpr) + private val guarantees = spec.guarantees.map(resolveOld.visitBExpr) private val guaranteesParam = spec.guarantees.map(ResolveSpecParam.visitBExpr) - private val guaranteesReflexive = spec.guarantees.map(RemoveOld.visitBExpr) + private val guaranteesReflexive = spec.guarantees.map(removeOld.visitBExpr) private val guaranteeOldVars = spec.guaranteeOldVars - private val LPreds = spec.LPreds.map((k, v) => k -> ResolveSpecL.visitBExpr(v)) - private val requires = spec.subroutines.map(s => s.name -> s.requires.map(ResolveSpec.visitBExpr)).toMap + private val LPreds = spec.LPreds.map((k, v) => k -> resolveSpecL.visitBExpr(v)) + private val requires = spec.subroutines.map(s => s.name -> s.requires.map(resolveSpec.visitBExpr)).toMap private val requiresDirect = spec.subroutines.map(s => s.name -> s.requiresDirect).toMap - private val ensures = spec.subroutines.map(s => s.name -> s.ensures.map(ResolveSpec.visitBExpr)).toMap + private val ensures = spec.subroutines.map(s => s.name -> s.ensures.map(resolveSpec.visitBExpr)).toMap private val ensuresDirect = spec.subroutines.map(s => s.name -> s.ensuresDirect).toMap private val libRelies = spec.subroutines.map(s => s.name -> s.rely).toMap private val libGuarantees = spec.subroutines.map(s => s.name -> s.guarantee).toMap @@ -82,7 +87,6 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti val rgProcs = genRely(relies, readOnlyMemory) :+ guaranteeReflexive - val rgLib = config.procedureRely match { case Some(ProcRelyVersion.Function) => // if rely/guarantee lib exist, create genRelyInv, and genInv for every procedure where rely/guarantee lib exist @@ -95,7 +99,6 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti case None => Nil } - val functionsUsed1 = procedures.flatMap(p => p.functionOps).toSet ++ rgProcs.flatMap(p => p.functionOps).toSet ++ rgLib.flatMap(p => p.functionOps).toSet ++ @@ -106,7 +109,6 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti val functionsUsed4 = functionsUsed3.flatMap(p => p.functionOps).map(p => functionOpToDefinition(p)) val functionsUsed = (functionsUsed2 ++ functionsUsed3 ++ functionsUsed4).toList.sorted - val 
declarations = globalDecls ++ globalConsts ++ functionsUsed ++ rgLib ++ pushUpModifiesFixedPoint(rgProcs ++ procedures) BProgram(declarations, filename) } @@ -613,11 +615,11 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti * */ (libRelies.keySet ++ libGuarantees.keySet).filter(x => libRelies(x).nonEmpty && libGuarantees(x).nonEmpty).map(targetName => { - val Rc: BExpr = ResolveSpec.visitBExpr(spec.relies.reduce((a, b) => BinaryBExpr(BoolAND, a, b))) - val Gc: BExpr = ResolveSpec.visitBExpr(spec.guarantees.reduce((a, b) => BinaryBExpr(BoolAND, a, b))) + val Rc: BExpr = resolveSpec.visitBExpr(spec.relies.reduce((a, b) => BinaryBExpr(BoolAND, a, b))) + val Gc: BExpr = resolveSpec.visitBExpr(spec.guarantees.reduce((a, b) => BinaryBExpr(BoolAND, a, b))) - val Rf: BExpr = ResolveSpec.visitBExpr(libRelies(targetName).reduce((a, b) => BinaryBExpr(BoolAND, a, b))) - val Gf: BExpr = ResolveSpec.visitBExpr(libGuarantees(targetName).reduce((a, b) => BinaryBExpr(BoolAND, a, b))) + val Rf: BExpr = resolveSpec.visitBExpr(libRelies(targetName).reduce((a, b) => BinaryBExpr(BoolAND, a, b))) + val Gf: BExpr = resolveSpec.visitBExpr(libGuarantees(targetName).reduce((a, b) => BinaryBExpr(BoolAND, a, b))) val inv = BinaryBExpr(BoolOR, Rc, Gf) val conseq = BinaryBExpr(BoolIMPLIES, Rc, Rf) diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index db33cb6d4..fe9f07b06 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -63,6 +63,7 @@ case class StaticAnalysisContext( memoryRegionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], varDepsSummaries: Map[Procedure, Map[Taintable, Set[Taintable]]], + regionInjector: RegionInjector ) /** Results of the main program execution. @@ -438,6 +439,7 @@ object StaticAnalysis { memoryRegionContents = memoryRegionContents, reachingDefs = reachingDefinitionsAnalysisResults, varDepsSummaries = varDepsSummaries, + regionInjector = regionInjector ) } @@ -521,16 +523,18 @@ object RunUtils { Logger.debug("[!] 
Translating to Boogie") + val regionInjector = analysis.map(a => a.regionInjector) + val boogiePrograms = if (q.boogieTranslation.threadSplit && ctx.program.threads.nonEmpty) { val outPrograms = ArrayBuffer[BProgram]() for (thread <- ctx.program.threads) { val fileName = q.outputPrefix.stripSuffix(".bpl") + "_" + thread.entry.name + ".bpl" - val boogieTranslator = IRToBoogie(ctx.program, ctx.specification, Some(thread), fileName) + val boogieTranslator = IRToBoogie(ctx.program, ctx.specification, Some(thread), fileName, regionInjector) outPrograms.addOne(boogieTranslator.translate(q.boogieTranslation)) } outPrograms } else { - val boogieTranslator = IRToBoogie(ctx.program, ctx.specification, None, q.outputPrefix) + val boogieTranslator = IRToBoogie(ctx.program, ctx.specification, None, q.outputPrefix, regionInjector) ArrayBuffer(boogieTranslator.translate(q.boogieTranslation)) } assert(invariant.singleCallBlockEnd(ctx.program)) From d601a5190951134565d75bf96d7cbc4dfeff778d Mon Sep 17 00:00:00 2001 From: l-kent Date: Wed, 23 Oct 2024 11:30:32 +1000 Subject: [PATCH 088/104] add regions to boogie output for rely --- src/main/scala/translating/IRToBoogie.scala | 44 +++++++++++++++------ src/main/scala/util/RunUtils.scala | 8 ++-- 2 files changed, 36 insertions(+), 16 deletions(-) diff --git a/src/main/scala/translating/IRToBoogie.scala b/src/main/scala/translating/IRToBoogie.scala index e57b1195b..3b03e529a 100644 --- a/src/main/scala/translating/IRToBoogie.scala +++ b/src/main/scala/translating/IRToBoogie.scala @@ -8,7 +8,7 @@ import util.{BoogieGeneratorConfig, BoogieMemoryAccessMode, ProcRelyVersion} import scala.collection.mutable import scala.collection.mutable.ArrayBuffer -class IRToBoogie(var program: Program, var spec: Specification, var thread: Option[ProgramThread], val filename: String, val regionInjector: Option[RegionInjector]) { +class IRToBoogie(var program: Program, var spec: Specification, var thread: Option[ProgramThread], val filename: String, val regionInjector: Option[RegionInjector], val config: BoogieGeneratorConfig) { private val externAttr = BAttribute("extern") private val inlineAttr = BAttribute("inline") private val globals = spec.globals @@ -37,6 +37,18 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti private val mem = BMapVar("mem", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global) private val Gamma_mem = BMapVar("Gamma_mem", MapBType(BitVecBType(64), BoolBType), Scope.Global) + private val memoriesGammas = if (regionInjector.isDefined) { + regionInjector.get.mergedRegions.values.map { region => + val memory = BMapVar(region.name, MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global) + val gamma = BMapVar(s"Gamma_${region.name}", MapBType(BitVecBType(64), BoolBType), Scope.Global) + memory -> gamma + }.toMap + } else { + Map(mem -> Gamma_mem) + } + + private val memories: Set[BVar] = memoriesGammas.flatMap((k, v) => Set(k, v)).toSet + private val mem_in = BMapVar("mem$in", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter) private val Gamma_mem_in = BMapVar("Gamma_mem$in", MapBType(BitVecBType(64), BoolBType), Scope.Parameter) private val mem_out = BMapVar("mem$out", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter) @@ -47,8 +59,6 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti private val mem_inv2 = BMapVar("mem$inv2", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Local) private val Gamma_mem_inv2 = BMapVar("Gamma_mem$inv2", MapBType(BitVecBType(64), BoolBType), 
Scope.Local) - - private var config: BoogieGeneratorConfig = BoogieGeneratorConfig() private val modifiedCheck: Set[BVar] = (for (i <- 19 to 29) yield { Set(BVariable("R" + i, BitVecBType(64), Scope.Global), BVariable("Gamma_R" + i, BoolBType, Scope.Global)) }).flatten.toSet ++ Set( @@ -56,8 +66,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti BVariable("Gamma_R31", BoolBType, Scope.Global) ) - def translate(boogieGeneratorConfig: BoogieGeneratorConfig): BProgram = { - config = boogieGeneratorConfig + def translate: BProgram = { val readOnlySections = program.usedMemory.values.filter(_.readOnly) val readOnlyMemory = memoryToCondition(readOnlySections) val initialSections = program.usedMemory.values.filter(!_.readOnly) @@ -71,7 +80,8 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti t.procedures.foreach(p => translatedProcedures.addOne(translateProcedure(p, readOnlyMemory, initialMemory))) translatedProcedures } - val defaultGlobals = List(BVarDecl(mem, List(externAttr)), BVarDecl(Gamma_mem, List(externAttr))) + //val defaultGlobals = (gammas ++ memories).toList.sorted.map(m => BVarDecl(m, List(externAttr))) + val defaultGlobals = List() val globalVars = procedures.flatMap(p => p.globals ++ p.freeRequires.flatMap(_.globals) ++ p.freeEnsures.flatMap(_.globals) ++ p.ensures.flatMap(_.globals) ++ p.requires.flatMap(_.globals)) val globalDecls = (globalVars.map(b => BVarDecl(b, List(externAttr))) ++ defaultGlobals).distinct.sorted.toList @@ -80,7 +90,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti val guaranteeReflexive = BProcedure( name = "guarantee_reflexive", - modifies = Set(mem, Gamma_mem), + modifies = memories, body = guaranteesReflexive.map(g => BAssert(g)), attributes = List(externAttr) ) @@ -118,17 +128,27 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti relies } else { // default case where no rely is given - rely on no external changes - List(BinaryBExpr(BVEQ, mem, Old(mem)), BinaryBExpr(BVEQ, Gamma_mem, Old(Gamma_mem))) + memories.toList.sorted.map(m => BinaryBExpr(BVEQ, m, Old(m))) } val relyEnsures = if (relies.nonEmpty) { val i = BVariable("i", BitVecBType(64), Scope.Local) - val rely2 = ForAll(List(i), BinaryBExpr(BoolIMPLIES, BinaryBExpr(BVEQ, MapAccess(mem, i), Old(MapAccess(mem, i))), BinaryBExpr(BVEQ, MapAccess(Gamma_mem, i), Old(MapAccess(Gamma_mem, i))))) - List(rely2) ++ reliesUsed + + val memImpliesGamma = memoriesGammas.keys.toList.sorted.map { memory => + val gamma = memoriesGammas(memory) + ForAll( + List(i), + BinaryBExpr(BoolIMPLIES, + BinaryBExpr(BVEQ, MapAccess(memory, i), Old(MapAccess(memory, i))), + BinaryBExpr(BVEQ, MapAccess(gamma, i), Old(MapAccess(gamma, i))) + ) + ) + } + memImpliesGamma ++ reliesUsed } else { reliesUsed } - val relyProc = BProcedure("rely", ensures = relyEnsures, freeEnsures = readOnlyMemory, modifies = Set(mem, Gamma_mem), attributes = List(externAttr)) - val relyTransitive = BProcedure("rely_transitive", ensures = reliesUsed, modifies = Set(mem, Gamma_mem), body = List(BProcedureCall("rely"), BProcedureCall("rely")), + val relyProc = BProcedure("rely", ensures = relyEnsures, freeEnsures = readOnlyMemory, modifies = memories, attributes = List(externAttr)) + val relyTransitive = BProcedure("rely_transitive", ensures = reliesUsed, modifies = memories, body = List(BProcedureCall("rely"), BProcedureCall("rely")), attributes = List(externAttr)) val relyReflexive = BProcedure("rely_reflexive", body 
= reliesReflexive.map(r => BAssert(r)), attributes = List(externAttr)) List(relyProc, relyTransitive, relyReflexive) diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index fe9f07b06..b1e372ec0 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -529,13 +529,13 @@ object RunUtils { val outPrograms = ArrayBuffer[BProgram]() for (thread <- ctx.program.threads) { val fileName = q.outputPrefix.stripSuffix(".bpl") + "_" + thread.entry.name + ".bpl" - val boogieTranslator = IRToBoogie(ctx.program, ctx.specification, Some(thread), fileName, regionInjector) - outPrograms.addOne(boogieTranslator.translate(q.boogieTranslation)) + val boogieTranslator = IRToBoogie(ctx.program, ctx.specification, Some(thread), fileName, regionInjector, q.boogieTranslation) + outPrograms.addOne(boogieTranslator.translate) } outPrograms } else { - val boogieTranslator = IRToBoogie(ctx.program, ctx.specification, None, q.outputPrefix, regionInjector) - ArrayBuffer(boogieTranslator.translate(q.boogieTranslation)) + val boogieTranslator = IRToBoogie(ctx.program, ctx.specification, None, q.outputPrefix, regionInjector, q.boogieTranslation) + ArrayBuffer(boogieTranslator.translate) } assert(invariant.singleCallBlockEnd(ctx.program)) From e4e2682133942d93dc4ad8b230630a368c2f565d Mon Sep 17 00:00:00 2001 From: l-kent Date: Wed, 23 Oct 2024 13:55:21 +1000 Subject: [PATCH 089/104] correct regions in Boogie output for secure update and guarantee checks, remove guarantee checks relating to unused variables --- src/main/scala/boogie/BVisitor.scala | 4 +- .../scala/specification/Specification.scala | 13 +- src/main/scala/translating/IRToBoogie.scala | 164 +++++++++++------- 3 files changed, 109 insertions(+), 72 deletions(-) diff --git a/src/main/scala/boogie/BVisitor.scala b/src/main/scala/boogie/BVisitor.scala index aa17df0e0..77f8ae274 100644 --- a/src/main/scala/boogie/BVisitor.scala +++ b/src/main/scala/boogie/BVisitor.scala @@ -70,7 +70,7 @@ class ResolveSpec(regionInjector: Option[RegionInjector]) extends SpecResolution override def visitSpecGamma(node: SpecGamma): GammaLoad = { val gammaMemory = if (regionInjector.isDefined) { - regionInjector.get.getMergedRegion(node.global.address) match { + regionInjector.get.getMergedRegion(node.address) match { case Some(region) => BMapVar(s"Gamma_${region.name}", MapBType(BitVecBType(64), BoolBType), Scope.Global) case None => gammaMem } @@ -82,7 +82,7 @@ class ResolveSpec(regionInjector: Option[RegionInjector]) extends SpecResolution override def visitArrayAccess(node: ArrayAccess): BMemoryLoad = { val memory = if (regionInjector.isDefined) { - regionInjector.get.getMergedRegion(node.global.address + node.offset) match { + regionInjector.get.getMergedRegion(node.address) match { case Some(region) => BMapVar(region.name, MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global) case None => mem } diff --git a/src/main/scala/specification/Specification.scala b/src/main/scala/specification/Specification.scala index 1bb536167..5cfdb7762 100644 --- a/src/main/scala/specification/Specification.scala +++ b/src/main/scala/specification/Specification.scala @@ -5,19 +5,22 @@ import ir.* import util.Logger trait SpecVar extends BExpr { + val address: BigInt override def getType: BType = { throw new Exception("getType called on SpecVar") } } -trait SpecGlobalOrAccess extends SpecVar { +trait SpecGlobalOrAccess extends SpecVar with Ordered[SpecGlobalOrAccess] { val toAddrVar: BExpr val toOldVar: BVar val toOldGamma: BVar val 
size: Int + + def compare(that: SpecGlobalOrAccess): Int = address.compare(that.address) } -case class SpecGlobal(name: String, override val size: Int, arraySize: Option[Int], address: BigInt) +case class SpecGlobal(name: String, override val size: Int, arraySize: Option[Int], override val address: BigInt) extends SpecGlobalOrAccess { override def specGlobals: Set[SpecGlobalOrAccess] = Set(this) override val toAddrVar: BVar = BVariable("$" + s"${name}_addr", BitVecBType(64), Scope.Const) @@ -28,12 +31,14 @@ case class SpecGlobal(name: String, override val size: Int, arraySize: Option[In } case class SpecGamma(global: SpecGlobal) extends SpecVar { + override val address = global.address override def acceptVisit(visitor: BVisitor): BExpr = visitor.visitSpecGamma(this) } case class ArrayAccess(global: SpecGlobal, index: Int) extends SpecGlobalOrAccess { - override val size: Int = global.size val offset = index * (global.size / 8) + override val address = global.address + offset + override val size: Int = global.size override val toOldVar: BVar = BVariable(s"${global.name}$$${index}_old", BitVecBType(global.size), Scope.Local) override val toAddrVar: BExpr = BinaryBExpr(BVADD, global.toAddrVar, BitVecBLiteral(offset, 64)) override val toOldGamma: BVar = BVariable(s"Gamma_${global.name}$$${index}_old", BoolBType, Scope.Local) @@ -49,8 +54,6 @@ case class Specification( subroutines: List[SubroutineSpec], directFunctions: Set[FunctionOp] ) { - val guaranteeOldVars: List[SpecGlobalOrAccess] = guarantees.flatMap(g => g.oldSpecGlobals) - val controls: Map[SpecGlobalOrAccess, Set[SpecGlobal]] = { val controlledBy = LPreds.map((k, v) => k -> v.specGlobals).collect { case (k, v) if v.nonEmpty => (k, v) } controlledBy.toSet.flatMap((k, v) => v.map(_ -> k)).groupMap(_(0))(_(1)) diff --git a/src/main/scala/translating/IRToBoogie.scala b/src/main/scala/translating/IRToBoogie.scala index 3b03e529a..65f1bb8dd 100644 --- a/src/main/scala/translating/IRToBoogie.scala +++ b/src/main/scala/translating/IRToBoogie.scala @@ -21,10 +21,10 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti private val relies = spec.relies.map(resolveSpec.visitBExpr) private val reliesParam = spec.relies.map(ResolveSpecParam.visitBExpr) private val reliesReflexive = spec.relies.map(removeOld.visitBExpr) - private val guarantees = spec.guarantees.map(resolveOld.visitBExpr) + private val guarantees = spec.guarantees.map(g => resolveOld.visitBExpr(g) -> g.oldSpecGlobals).toMap + private val guaranteeRegions = guarantees.keys.map(g => g -> g.globals).toMap private val guaranteesParam = spec.guarantees.map(ResolveSpecParam.visitBExpr) private val guaranteesReflexive = spec.guarantees.map(removeOld.visitBExpr) - private val guaranteeOldVars = spec.guaranteeOldVars private val LPreds = spec.LPreds.map((k, v) => k -> resolveSpecL.visitBExpr(v)) private val requires = spec.subroutines.map(s => s.name -> s.requires.map(resolveSpec.visitBExpr)).toMap private val requiresDirect = spec.subroutines.map(s => s.name -> s.requiresDirect).toMap @@ -292,7 +292,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti val in = List(memVar, indexVar, valueVar) val out = BParam(memType) val body: BExpr = config.memoryFunctionType match { - case BoogieMemoryAccessMode.SuccessiveStoreSelect => { + case BoogieMemoryAccessMode.SuccessiveStoreSelect => val indices: Seq[BExpr] = for (i <- 0 until m.accesses) yield { if (i == 0) { indexVar @@ -312,18 +312,19 @@ class IRToBoogie(var program: Program, var 
spec: Specification, var thread: Opti } indiceValues.tail.foldLeft(MapUpdate(memVar, indices.head, valuesEndian.head)) { - (update: MapUpdate, next: (BExpr, BExpr)) => MapUpdate(update, next._1, next._2) + (update: MapUpdate, next: (BExpr, BExpr)) => MapUpdate(update, next(0), next(1)) } - } + case BoogieMemoryAccessMode.LambdaStoreSelect => - if m.accesses == 1 then + if (m.accesses == 1) { MapUpdate(memVar, indexVar, valueVar) - else { + } else { val i = BVariable("i", BitVecBType(m.addressSize), Scope.Local) Lambda(List(i), IfThenElse( BInBounds(indexVar, BitVecBLiteral(m.accesses, m.addressSize), m.endian, i), BByteExtract(valueVar, BinaryBExpr(BVSUB, i, indexVar)), - MapAccess(memVar, i))) + MapAccess(memVar, i)) + ) } } @@ -345,7 +346,8 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti Lambda(List(i), IfThenElse( BInBounds(indexVar, BitVecBLiteral(g.accesses, g.addressSize), Endian.LittleEndian, i), valueVar, - MapAccess(gammaMapVar, i))) + MapAccess(gammaMapVar, i)) + ) } case BoogieMemoryAccessMode.SuccessiveStoreSelect => val indices: Seq[BExpr] = for (i <- 0 until g.accesses) yield { @@ -362,7 +364,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti (indices(i), values(i)) } indiceValues.tail.foldLeft(MapUpdate(gammaMapVar, indices.head, values.head)) { - (update: MapUpdate, next: (BExpr, BExpr)) => MapUpdate(update, next._1, next._2) + (update: MapUpdate, next: (BExpr, BExpr)) => MapUpdate(update, next(0), next(1)) } } @@ -371,26 +373,23 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti val memoryVar = BParam("memory", l.memoryType) val indexVar = BParam("index", l.indexType) val body: BExpr = LPreds.keys.foldLeft(FalseBLiteral) { (ite: BExpr, next: SpecGlobal) => - { - val guard = next.arraySize match { - case Some(size: Int) => - val initial: BExpr = BinaryBExpr(BoolEQ, indexVar, ArrayAccess(next, 0).toAddrVar) - val indices = 1 until size - indices.foldLeft(initial) { (or: BExpr, i: Int) => - { - BinaryBExpr(BoolOR, BinaryBExpr(BoolEQ, indexVar, ArrayAccess(next, i).toAddrVar), or) - } - } - case None => BinaryBExpr(BoolEQ, indexVar, next.toAddrVar) - } - val LPred = LPreds(next) - /*if (controlled.contains(next)) { - FunctionCall(s"L_${next.name}", List(l.memory), BoolType) - } else { - LPreds(next) - } */ - IfThenElse(guard, LPred, ite) + val guard = next.arraySize match { + case Some(size: Int) => + val initial: BExpr = BinaryBExpr(BoolEQ, indexVar, ArrayAccess(next, 0).toAddrVar) + val indices = 1 until size + indices.foldLeft(initial) { (or: BExpr, i: Int) => + BinaryBExpr(BoolOR, BinaryBExpr(BoolEQ, indexVar, ArrayAccess(next, i).toAddrVar), or) + } + case None => + BinaryBExpr(BoolEQ, indexVar, next.toAddrVar) } + val LPred = LPreds(next) + /*if (controlled.contains(next)) { + FunctionCall(s"L_${next.name}", List(l.memory), BoolType) + } else { + LPreds(next) + } */ + IfThenElse(guard, LPred, ite) } BFunction("L", List(memoryVar, indexVar), BParam(BoolBType), Some(body), List(externAttr)) case b: ByteExtract => @@ -399,10 +398,13 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti val in = List(valueVar, offsetVar) val out = BParam(BitVecBType(8)) val shift = BinaryBExpr(BVMUL, offsetVar, BitVecBLiteral(8, b.offsetSize)) - val eshift = - if (b.valueSize < b.offsetSize) BVExtract(b.valueSize, 0, shift) - else if (b.valueSize == b.offsetSize) shift - else BVZeroExtend(b.valueSize - b.offsetSize, shift) + val eshift = if (b.valueSize < 
b.offsetSize) { + BVExtract(b.valueSize, 0, shift) + } else if (b.valueSize == b.offsetSize) { + shift + } else { + BVZeroExtend(b.valueSize - b.offsetSize, shift) + } val body = BVExtract(8, 0, BinaryBExpr(BVLSHR, valueVar, eshift)) BFunction(b.fnName, in, out, Some(body), List(inlineAttr)) case b: InBounds => @@ -432,7 +434,6 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti } def pushUpModifiesFixedPoint(procedures: List[BProcedure]): List[BProcedure] = { - var changed = true var proceduresUpdated = procedures while (changed) { @@ -461,12 +462,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti def translateProcedure(p: Procedure, readOnlyMemory: List[BExpr], initialMemory: List[BExpr]): BProcedure = { val body = (p.entryBlock.view ++ p.blocks.filterNot(x => p.entryBlock.contains(x))).map(translateBlock).toList - val callsRely: Boolean = body.flatMap(_.body).exists(_ match - case BProcedureCall("rely", lhs, params, comment) => true - case _ => false) - - val modifies: Seq[BVar] = p.modifies.toSeq - .flatMap { + val modifies: Seq[BVar] = p.modifies.toSeq.flatMap { case m: Memory => Seq(m.toBoogie, m.toGamma) case r: Register => Seq(r.toBoogie, r.toGamma) }.distinct.sorted @@ -634,7 +630,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti * Procedures with no precond and the predicate as their postcond are generated to encode two-state assumptions. * */ - (libRelies.keySet ++ libGuarantees.keySet).filter(x => libRelies(x).nonEmpty && libGuarantees(x).nonEmpty).map(targetName => { + (libRelies.keySet ++ libGuarantees.keySet).filter(x => libRelies(x).nonEmpty && libGuarantees(x).nonEmpty).map { targetName => val Rc: BExpr = resolveSpec.visitBExpr(spec.relies.reduce((a, b) => BinaryBExpr(BoolAND, a, b))) val Gc: BExpr = resolveSpec.visitBExpr(spec.guarantees.reduce((a, b) => BinaryBExpr(BoolAND, a, b))) @@ -663,20 +659,19 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti )) targetName -> Seq(procInv, proc2, procGf, proc4, proc5) - }).toMap + }.toMap } - def relyfun(targetName: String) : Option[IfCmd] = { - libRGFunsContradictionProof.get(targetName).map(proc => - { - IfCmd(StarBLiteral, List( - BProcedureCall(proc(0).name, Seq(), Seq()), - BProcedureCall(proc(1).name, Seq(), Seq()), - BAssert(FalseBLiteral) - )) - } - ) + def relyfun(targetName: String): Option[IfCmd] = { + libRGFunsContradictionProof.get(targetName).map { proc => + IfCmd(StarBLiteral, List( + BProcedureCall(proc(0).name, Seq(), Seq()), + BProcedureCall(proc(1).name, Seq(), Seq()), + BAssert(FalseBLiteral) + )) + } } + def translate(j: Jump): List[BCmd] = j match { case g: GoTo => // collects all targets of the goto with a branch condition that we need to check the security level for @@ -694,8 +689,8 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti } val jump = GoToCmd(g.targets.map(_.label).toSeq) conditionAssert :+ jump - case r: Return => List(ReturnCmd) - case r: Unreachable => List(BAssume(FalseBLiteral)) + case _: Return => List(ReturnCmd) + case _: Unreachable => List(BAssume(FalseBLiteral)) } def translate(j: Call): List[BCmd] = j match { @@ -720,7 +715,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti def translate(s: Statement): List[BCmd] = s match { case d: Call => translate(d) - case m: NOP => List.empty + case _: NOP => List.empty case m: MemoryAssign => val lhs = m.mem.toBoogie val rhs = 
BMemoryStore(m.mem.toBoogie, m.index.toBoogie, m.value.toBoogie, m.endian, m.size) @@ -733,22 +728,59 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti case _ => List.empty } m.mem match { - case s: StackMemory => + case _: StackMemory => List(store) ++ stateSplit - case s: SharedMemory => + case _: SharedMemory => val rely = BProcedureCall("rely") val gammaValueCheck = BAssert(BinaryBExpr(BoolIMPLIES, L(lhs, rhs.index), m.value.toGamma)) - val oldAssigns = - guaranteeOldVars.map(g => AssignCmd(g.toOldVar, BMemoryLoad(lhs, g.toAddrVar, Endian.LittleEndian, g.size))) - val oldGammaAssigns = controlled.map(g => + val oldVars = guarantees.keys.view.toSet.flatMap { g => + if (guaranteeRegions(g).contains(lhs)) { + guarantees(g) + } else { + Set() + } + } + val oldAssigns = oldVars.toList.sorted.map { g => + val memory = if (regionInjector.isDefined) { + regionInjector.get.getMergedRegion(g.address) match { + case Some(region) => BMapVar(region.name, MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global) + case None => lhs + } + } else { + lhs + } + AssignCmd(g.toOldVar, BMemoryLoad(memory, g.toAddrVar, Endian.LittleEndian, g.size)) + } + val oldGammaAssigns = controlled.map { g => + val (memory, gamma) = if (regionInjector.isDefined) { + regionInjector.get.getMergedRegion(g.address) match { + case Some(region) => + (BMapVar(region.name, MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global), + BMapVar(s"Gamma_${region.name}", MapBType(BitVecBType(64), BoolBType), Scope.Global)) + case None => + (lhs, lhsGamma) + } + } else { + (lhs, lhsGamma) + } AssignCmd( g.toOldGamma, - BinaryBExpr(BoolOR, GammaLoad(lhsGamma, g.toAddrVar, g.size, g.size / m.mem.valueSize), L(lhs, g.toAddrVar)) + BinaryBExpr(BoolOR, GammaLoad(gamma, g.toAddrVar, g.size, g.size / m.mem.valueSize), L(memory, g.toAddrVar)) ) - ) - val secureUpdate = for (c <- controls.keys) yield { + } + val secureUpdate = for (c <- controls.keys.view.toSeq.sorted) yield { val addrCheck = BinaryBExpr(BVEQ, rhs.index, c.toAddrVar) - val checks = controls(c).map(v => BinaryBExpr(BoolIMPLIES, L(lhs, v.toAddrVar), v.toOldGamma)).toList + val checks = controls(c).toList.sorted.map { v => + val memory = if (regionInjector.isDefined) { + regionInjector.get.getMergedRegion(v.address) match { + case Some(region) => BMapVar(region.name, MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global) + case None => lhs + } + } else { + lhs + } + BinaryBExpr(BoolIMPLIES, L(memory, v.toAddrVar), v.toOldGamma) + } val checksAnd = if (checks.size > 1) { checks.tail.foldLeft(checks.head)((next: BExpr, ands: BExpr) => BinaryBExpr(BoolAND, next, ands)) } else { @@ -756,7 +788,9 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti } BAssert(BinaryBExpr(BoolIMPLIES, addrCheck, checksAnd)) } - val guaranteeChecks = guarantees.map(v => BAssert(v)) + val guaranteeChecks = guarantees.keys.collect { + case g if guaranteeRegions(g).contains(lhs) => BAssert(g) + } (List(rely, gammaValueCheck) ++ oldAssigns ++ oldGammaAssigns :+ store) ++ secureUpdate ++ guaranteeChecks ++ stateSplit } case l: Assign => From cbbe539b6232087d66930785d579bafa8044a36e Mon Sep 17 00:00:00 2001 From: l-kent Date: Wed, 23 Oct 2024 14:49:13 +1000 Subject: [PATCH 090/104] properly collect Boogie global variables --- src/main/scala/boogie/BExpr.scala | 4 +- src/main/scala/boogie/BProgram.scala | 17 ++++-- src/main/scala/translating/IRToBoogie.scala | 57 ++++++++++----------- 3 files changed, 41 insertions(+), 37 deletions(-) diff --git 
a/src/main/scala/boogie/BExpr.scala b/src/main/scala/boogie/BExpr.scala index 5957cb0e2..820ad84d1 100644 --- a/src/main/scala/boogie/BExpr.scala +++ b/src/main/scala/boogie/BExpr.scala @@ -628,7 +628,7 @@ case class L(memory: BMapVar, index: BExpr) extends BExpr { override def toString: String = s"L($memory, $index)" override val getType: BType = BoolBType override def functionOps: Set[FunctionOp] = index.functionOps + LOp(memory.getType, index.getType) - override def locals: Set[BVar] = index.locals - override def globals: Set[BVar] = index.globals + override def locals: Set[BVar] = index.locals ++ memory.locals + override def globals: Set[BVar] = index.globals ++ memory.globals override def loads: Set[BExpr] = index.loads } diff --git a/src/main/scala/boogie/BProgram.scala b/src/main/scala/boogie/BProgram.scala index fe6d19e0b..b8e276bf6 100644 --- a/src/main/scala/boogie/BProgram.scala +++ b/src/main/scala/boogie/BProgram.scala @@ -75,12 +75,19 @@ case class BProcedure( procList ++ implList ++ List("") } override def toString: String = toBoogie.mkString("\n") - def functionOps: Set[FunctionOp] = - body.flatMap(c => c.functionOps).toSet ++ ensures.flatMap(c => c.functionOps).toSet ++ requires - .flatMap(c => c.functionOps) - .toSet ++ freeEnsures.flatMap(c => c.functionOps).toSet ++ freeRequires.flatMap(c => c.functionOps).toSet + def functionOps: Set[FunctionOp] = { + val bodyOps = body.flatMap(_.functionOps) + val ensuresOps = ensures.flatMap(_.functionOps) ++ freeEnsures.flatMap(_.functionOps) + val requiresOps = requires.flatMap(_.functionOps) ++ freeRequires.flatMap(_.functionOps) + (bodyOps ++ ensuresOps ++ requiresOps).toSet + } - def globals: Set[BVar] = body.flatMap(c => c.globals).toSet ++ modifies + def globals: Set[BVar] = { + val bodyGlobals = body.flatMap(_.globals) + val ensuresGlobals = ensures.flatMap(_.globals) ++ freeEnsures.flatMap(_.globals) + val requiresGlobals = requires.flatMap(_.globals) ++ freeRequires.flatMap(_.globals) + (bodyGlobals ++ ensuresGlobals ++ requiresGlobals).toSet ++ modifies + } } case class BAxiom(body: BExpr, override val attributes: List[BAttribute] = List()) extends BDeclaration { diff --git a/src/main/scala/translating/IRToBoogie.scala b/src/main/scala/translating/IRToBoogie.scala index 65f1bb8dd..44c0c503b 100644 --- a/src/main/scala/translating/IRToBoogie.scala +++ b/src/main/scala/translating/IRToBoogie.scala @@ -72,21 +72,14 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti val initialSections = program.usedMemory.values.filter(!_.readOnly) val initialMemory = memoryToCondition(initialSections) - val procedures = thread match { + val procedures: ArrayBuffer[BProcedure] = thread match { case None => program.procedures.map(f => translateProcedure(f, readOnlyMemory, initialMemory)) case Some(t) => - val translatedProcedures = ArrayBuffer[BProcedure]() + val translatedProcedures: ArrayBuffer[BProcedure] = ArrayBuffer[BProcedure]() t.procedures.foreach(p => translatedProcedures.addOne(translateProcedure(p, readOnlyMemory, initialMemory))) translatedProcedures } - //val defaultGlobals = (gammas ++ memories).toList.sorted.map(m => BVarDecl(m, List(externAttr))) - val defaultGlobals = List() - val globalVars = procedures.flatMap(p => p.globals ++ p.freeRequires.flatMap(_.globals) ++ p.freeEnsures.flatMap(_.globals) ++ p.ensures.flatMap(_.globals) ++ p.requires.flatMap(_.globals)) - val globalDecls = (globalVars.map(b => BVarDecl(b, List(externAttr))) ++ defaultGlobals).distinct.sorted.toList - - val 
globalConsts: List[BConstAxiomPair] = - globals.map(g => BConstAxiomPair(BVarDecl(g.toAddrVar, List(externAttr)), g.toAxiom)).toList.sorted val guaranteeReflexive = BProcedure( name = "guarantee_reflexive", @@ -119,6 +112,13 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti val functionsUsed4 = functionsUsed3.flatMap(p => p.functionOps).map(p => functionOpToDefinition(p)) val functionsUsed = (functionsUsed2 ++ functionsUsed3 ++ functionsUsed4).toList.sorted + val globalVars = procedures.flatMap(_.globals) ++ rgProcs.flatMap(_.globals) + val globalDecls = globalVars.map(b => BVarDecl(b, List(externAttr))).distinct.sorted.toList + + val globalConsts: List[BConstAxiomPair] = globals.map { g => + BConstAxiomPair(BVarDecl(g.toAddrVar, List(externAttr)), g.toAxiom) + }.toList.sorted + val declarations = globalDecls ++ globalConsts ++ functionsUsed ++ rgLib ++ pushUpModifiesFixedPoint(rgProcs ++ procedures) BProgram(declarations, filename) } @@ -154,7 +154,6 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti List(relyProc, relyTransitive, relyReflexive) } - def genRelyInv: BProcedure = { val reliesUsed = if (reliesParam.nonEmpty) { reliesParam @@ -439,21 +438,19 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti while (changed) { changed = false val nameToProcedure = proceduresUpdated.map(p => p.name -> p).toMap - proceduresUpdated = proceduresUpdated.map( - procedure => { - val cmds: List[BCmd] = procedure.body.flatten { - case b: BBlock => b.body - case c: BCmd => Seq(c) - } - val callModifies = cmds.collect { case c: BProcedureCall => nameToProcedure(c.name) }.flatMap(_.modifies) - val modifiesUpdate = procedure.modifies ++ callModifies - if (modifiesUpdate != procedure.modifies) { - changed = true - } - - procedure.copy(modifies = modifiesUpdate) + proceduresUpdated = proceduresUpdated.map { procedure => + val cmds: List[BCmd] = procedure.body.flatten { + case b: BBlock => b.body + case c: BCmd => Seq(c) + } + val callModifies = cmds.collect { case c: BProcedureCall => nameToProcedure(c.name) }.flatMap(_.modifies) + val modifiesUpdate = procedure.modifies ++ callModifies + if (modifiesUpdate != procedure.modifies) { + changed = true } - ) + + procedure.copy(modifies = modifiesUpdate) + } } proceduresUpdated } @@ -744,10 +741,10 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti val memory = if (regionInjector.isDefined) { regionInjector.get.getMergedRegion(g.address) match { case Some(region) => BMapVar(region.name, MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global) - case None => lhs + case None => mem } } else { - lhs + mem } AssignCmd(g.toOldVar, BMemoryLoad(memory, g.toAddrVar, Endian.LittleEndian, g.size)) } @@ -758,10 +755,10 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti (BMapVar(region.name, MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global), BMapVar(s"Gamma_${region.name}", MapBType(BitVecBType(64), BoolBType), Scope.Global)) case None => - (lhs, lhsGamma) + (mem, Gamma_mem) } } else { - (lhs, lhsGamma) + (mem, Gamma_mem) } AssignCmd( g.toOldGamma, @@ -774,10 +771,10 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti val memory = if (regionInjector.isDefined) { regionInjector.get.getMergedRegion(v.address) match { case Some(region) => BMapVar(region.name, MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global) - case None => lhs + case None => mem } } else { - lhs + mem } 
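// Aside (a sketch, not the real IR types): pushUpModifiesFixedPoint above is a standard
// Kleene-style fixed point, folding each callee's modifies set into its callers until
// nothing grows. The same idea over a toy call graph, with hypothetical string names
// standing in for procedures and assuming every procedure appears in both maps:
def propagateModifiesSketch(callees: Map[String, Set[String]],
                            initial: Map[String, Set[String]]): Map[String, Set[String]] = {
  var modifies = initial
  var changed = true
  while (changed) {
    changed = false
    for ((proc, calledProcs) <- callees) {
      val updated = modifies(proc) ++ calledProcs.flatMap(modifies)
      if (updated != modifies(proc)) {
        modifies = modifies.updated(proc, updated)
        changed = true
      }
    }
  }
  modifies
}
// Termination follows because each set only grows and is bounded by the finite set of
// global names, so the loop must reach a fixed point.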
BinaryBExpr(BoolIMPLIES, L(memory, v.toAddrVar), v.toOldGamma) } From 6aa0f4283a5ba13c1c6bf3d7d0882b0a31dcc90b Mon Sep 17 00:00:00 2001 From: l-kent Date: Thu, 24 Oct 2024 09:19:51 +1000 Subject: [PATCH 091/104] have L function in Boogie take in all relevant memories as parameters, comment out SummaryGenerator since it needs significant work to be compatible with these changes --- .../scala/analysis/SummaryGenerator.scala | 4 +- src/main/scala/boogie/BExpr.scala | 42 ++++++++-- src/main/scala/boogie/BVisitor.scala | 35 ++++---- src/main/scala/ir/Expr.scala | 38 +++------ src/main/scala/ir/Program.scala | 8 +- src/main/scala/translating/IRToBoogie.scala | 81 ++++++++++++------- .../translating/SpecificationLoader.scala | 37 ++++----- src/main/scala/util/RunUtils.scala | 4 + 8 files changed, 143 insertions(+), 106 deletions(-) diff --git a/src/main/scala/analysis/SummaryGenerator.scala b/src/main/scala/analysis/SummaryGenerator.scala index 552295f23..b264755dd 100644 --- a/src/main/scala/analysis/SummaryGenerator.scala +++ b/src/main/scala/analysis/SummaryGenerator.scala @@ -1,3 +1,4 @@ +/* package analysis import analysis.* @@ -95,7 +96,7 @@ class SummaryGenerator( private def toGamma(variable: Taintable): Option[BExpr] = { variable match { - case variable: Register => Some(variable.toGamma) + case variable: Register => Some(variable.toGamma()) case variable: LocalVar => None case variable: GlobalVariable => Some(variable.toGamma) //case variable: LocalStackVariable => None @@ -185,3 +186,4 @@ class SummaryGenerator( } } } +*/ \ No newline at end of file diff --git a/src/main/scala/boogie/BExpr.scala b/src/main/scala/boogie/BExpr.scala index 820ad84d1..d334d6800 100644 --- a/src/main/scala/boogie/BExpr.scala +++ b/src/main/scala/boogie/BExpr.scala @@ -10,6 +10,7 @@ trait BExpr { def functionOps: Set[FunctionOp] = Set() def locals: Set[BVar] = Set() def globals: Set[BVar] = Set() + def params: Set[BVar] = Set() def specGlobals: Set[SpecGlobalOrAccess] = Set() def oldSpecGlobals: Set[SpecGlobalOrAccess] = Set() def loads: Set[BExpr] = Set() @@ -52,6 +53,7 @@ case class BVExtract(end: Int, start: Int, body: BExpr) extends BExpr { override def functionOps: Set[FunctionOp] = body.functionOps override def locals: Set[BVar] = body.locals override def globals: Set[BVar] = body.globals + override def params: Set[BVar] = body.params override def specGlobals: Set[SpecGlobalOrAccess] = body.specGlobals override def oldSpecGlobals: Set[SpecGlobalOrAccess] = body.oldSpecGlobals override def loads: Set[BExpr] = body.loads @@ -89,6 +91,7 @@ case class BVRepeat(repeats: Int, body: BExpr) extends BExpr { } override def locals: Set[BVar] = body.locals override def globals: Set[BVar] = body.globals + override def params: Set[BVar] = body.params override def specGlobals: Set[SpecGlobalOrAccess] = body.specGlobals override def oldSpecGlobals: Set[SpecGlobalOrAccess] = body.oldSpecGlobals override def loads: Set[BExpr] = body.loads @@ -120,6 +123,7 @@ case class BVZeroExtend(extension: Int, body: BExpr) extends BExpr { } override def locals: Set[BVar] = body.locals override def globals: Set[BVar] = body.globals + override def params: Set[BVar] = body.params override def specGlobals: Set[SpecGlobalOrAccess] = body.specGlobals override def oldSpecGlobals: Set[SpecGlobalOrAccess] = body.oldSpecGlobals override def loads: Set[BExpr] = body.loads @@ -152,6 +156,7 @@ case class BVSignExtend(extension: Int, body: BExpr) extends BExpr { } override def locals: Set[BVar] = body.locals override def globals: Set[BVar] = 
body.globals + override def params: Set[BVar] = body.params override def specGlobals: Set[SpecGlobalOrAccess] = body.specGlobals override def oldSpecGlobals: Set[SpecGlobalOrAccess] = body.oldSpecGlobals override def loads: Set[BExpr] = body.loads @@ -176,6 +181,10 @@ abstract class BVar(val name: String, val bType: BType, val scope: Scope) extend case Scope.Global => Set(this) case _ => Set() } + override def params: Set[BVar] = scope match { + case Scope.Parameter => Set(this) + case _ => Set() + } } case class BVariable(override val name: String, override val bType: BType, override val scope: Scope) @@ -211,6 +220,7 @@ case class BFunctionCall(name: String, args: List[BExpr], outType: BType, uninte } override def locals: Set[BVar] = args.flatMap(a => a.locals).toSet override def globals: Set[BVar] = args.flatMap(a => a.globals).toSet + override def params: Set[BVar] = args.flatMap(a => a.params).toSet override def specGlobals: Set[SpecGlobalOrAccess] = args.flatMap(a => a.specGlobals).toSet override def oldSpecGlobals: Set[SpecGlobalOrAccess] = args.flatMap(a => a.oldSpecGlobals).toSet override def loads: Set[BExpr] = args.flatMap(a => a.loads).toSet @@ -247,6 +257,7 @@ case class UnaryBExpr(op: UnOp, arg: BExpr) extends BExpr { override def locals: Set[BVar] = arg.locals override def globals: Set[BVar] = arg.globals + override def params: Set[BVar] = arg.params override def specGlobals: Set[SpecGlobalOrAccess] = arg.specGlobals override def oldSpecGlobals: Set[SpecGlobalOrAccess] = arg.oldSpecGlobals override def loads: Set[BExpr] = arg.loads @@ -352,6 +363,7 @@ case class BinaryBExpr(op: BinOp, arg1: BExpr, arg2: BExpr) extends BExpr { override def locals: Set[BVar] = arg1.locals ++ arg2.locals override def globals: Set[BVar] = arg1.globals ++ arg2.globals + override def params: Set[BVar] = arg1.params ++ arg2.params override def specGlobals: Set[SpecGlobalOrAccess] = arg1.specGlobals ++ arg2.specGlobals override def oldSpecGlobals: Set[SpecGlobalOrAccess] = arg1.oldSpecGlobals ++ arg2.oldSpecGlobals override def loads: Set[BExpr] = arg1.loads ++ arg2.loads @@ -372,6 +384,7 @@ case class IfThenElse(guard: BExpr, thenExpr: BExpr, elseExpr: BExpr) extends BE override def functionOps: Set[FunctionOp] = guard.functionOps ++ thenExpr.functionOps ++ elseExpr.functionOps override def locals: Set[BVar] = guard.locals ++ thenExpr.locals ++ elseExpr.locals override def globals: Set[BVar] = guard.globals ++ thenExpr.globals ++ elseExpr.globals + override def params: Set[BVar] = guard.params ++ thenExpr.params ++ elseExpr.params override def specGlobals: Set[SpecGlobalOrAccess] = guard.specGlobals ++ thenExpr.specGlobals ++ elseExpr.specGlobals override def oldSpecGlobals: Set[SpecGlobalOrAccess] = guard.oldSpecGlobals ++ thenExpr.oldSpecGlobals ++ elseExpr.oldSpecGlobals @@ -389,6 +402,7 @@ trait QuantifierExpr(sort: Quantifier, bound: List[BVar], body: BExpr) extends B override def functionOps: Set[FunctionOp] = body.functionOps override def locals: Set[BVar] = body.locals -- bound.toSet override def globals: Set[BVar] = body.globals -- bound.toSet + override def params: Set[BVar] = body.params -- bound.toSet override def specGlobals: Set[SpecGlobalOrAccess] = body.specGlobals override def oldSpecGlobals: Set[SpecGlobalOrAccess] = body.oldSpecGlobals override def loads: Set[BExpr] = body.loads @@ -412,6 +426,7 @@ case class Old(body: BExpr) extends BExpr { override def functionOps: Set[FunctionOp] = body.functionOps override def locals: Set[BVar] = body.locals override def globals: Set[BVar] = 
body.globals + override def params: Set[BVar] = body.params override def oldSpecGlobals: Set[SpecGlobalOrAccess] = body.specGlobals override def loads: Set[BExpr] = body.loads override def acceptVisit(visitor: BVisitor): BExpr = visitor.visitOld(this) @@ -423,6 +438,7 @@ case class MapAccess(mapVar: BMapVar, index: BExpr) extends BExpr { override def functionOps: Set[FunctionOp] = index.functionOps override def locals: Set[BVar] = index.locals override def globals: Set[BVar] = index.globals ++ mapVar.globals + override def params: Set[BVar] = index.params ++ mapVar.params override def loads: Set[BExpr] = index.loads } @@ -432,6 +448,7 @@ case class MapUpdate(map: BExpr, index: BExpr, value: BExpr) extends BExpr { override def functionOps: Set[FunctionOp] = map.functionOps ++ index.functionOps ++ value.functionOps override def locals: Set[BVar] = map.locals ++ index.locals ++ value.locals override def globals: Set[BVar] = index.globals ++ map.globals ++ value.globals + override def params: Set[BVar] = index.params ++ map.params ++ value.params override def loads: Set[BExpr] = index.loads ++ value.loads ++ map.loads } @@ -463,7 +480,7 @@ case class GammaLoadOp(addressSize: Int, bits: Int, accesses: Int) extends Funct case class GammaStoreOp(addressSize: Int, bits: Int, accesses: Int) extends FunctionOp { val fnName: String = s"gamma_store$bits" } -case class LOp(memoryType: BType, indexType: BType) extends FunctionOp +case class LOp(indexType: BType) extends FunctionOp /** * Utility to extract a particular byte from a bitvector. @@ -492,6 +509,7 @@ case class BByteExtract(value: BExpr, offset: BExpr) extends BExpr { value.functionOps ++ offset.functionOps + ByteExtract(valueSize, offsetSize) override def locals: Set[BVar] = value.locals ++ offset.locals override def globals: Set[BVar] = value.globals ++ offset.globals + override def params: Set[BVar] = value.params ++ offset.params override def loads: Set[BExpr] = value.loads ++ offset.loads } @@ -525,7 +543,8 @@ case class BInBounds(base: BExpr, len: BExpr, endian: Endian, i: BExpr) extends override def functionOps: Set[FunctionOp] = base.functionOps ++ len.functionOps ++ i.functionOps + InBounds(baseSize, endian) override def locals: Set[BVar] = base.locals ++ len.locals ++ i.locals - override def globals: Set[BVar] = base.globals ++ len.globals ++ i.globals + override def globals: Set[BVar] = base.globals ++ len.globals ++ i.globals + override def params: Set[BVar] = base.params ++ len.params ++ i.params override def loads: Set[BExpr] = base.loads ++ len.loads ++ i.loads } @@ -552,6 +571,7 @@ case class BMemoryLoad(memory: BMapVar, index: BExpr, endian: Endian, bits: Int) memory.functionOps ++ index.functionOps + MemoryLoadOp(addressSize, valueSize, endian, bits) override def locals: Set[BVar] = memory.locals ++ index.locals override def globals: Set[BVar] = index.globals ++ memory.globals + override def params: Set[BVar] = index.params ++ memory.params override def loads: Set[BExpr] = Set(this) ++ index.loads } @@ -578,6 +598,7 @@ case class BMemoryStore(memory: BMapVar, index: BExpr, value: BExpr, endian: End memory.functionOps ++ index.functionOps ++ value.functionOps + MemoryStoreOp(addressSize, valueSize, endian, bits) override def locals: Set[BVar] = memory.locals ++ index.locals ++ value.locals override def globals: Set[BVar] = index.globals ++ memory.globals ++ value.globals + override def params: Set[BVar] = index.params ++ memory.params ++ value.params override def loads: Set[BExpr] = index.loads ++ value.loads } @@ -602,6 
+623,7 @@ case class GammaLoad(gammaMap: BMapVar, index: BExpr, bits: Int, accesses: Int) gammaMap.functionOps ++ index.functionOps + GammaLoadOp(addressSize, bits, accesses) override def locals: Set[BVar] = gammaMap.locals ++ index.locals override def globals: Set[BVar] = index.globals ++ gammaMap.globals + override def params: Set[BVar] = index.params ++ gammaMap.params override def loads: Set[BExpr] = Set(this) ++ index.loads } @@ -621,14 +643,20 @@ case class GammaStore(gammaMap: BMapVar, index: BExpr, value: BExpr, bits: Int, gammaMap.functionOps ++ index.functionOps ++ value.functionOps + GammaStoreOp(addressSize, bits, accesses) override def locals: Set[BVar] = gammaMap.locals ++ index.locals ++ value.locals override def globals: Set[BVar] = index.globals ++ gammaMap.globals ++ value.globals + override def params: Set[BVar] = index.params ++ gammaMap.params ++ value.params override def loads: Set[BExpr] = index.loads ++ value.loads } -case class L(memory: BMapVar, index: BExpr) extends BExpr { - override def toString: String = s"L($memory, $index)" +case class L(memories: List[BMapVar], index: BExpr) extends BExpr { + override def toString: String = if (memories.isEmpty) { + s"L($index)" + } else { + s"L(${memories.mkString(", ")}, $index)" + } override val getType: BType = BoolBType - override def functionOps: Set[FunctionOp] = index.functionOps + LOp(memory.getType, index.getType) - override def locals: Set[BVar] = index.locals ++ memory.locals - override def globals: Set[BVar] = index.globals ++ memory.globals + override def functionOps: Set[FunctionOp] = index.functionOps + LOp(index.getType) + override def locals: Set[BVar] = index.locals ++ memories.flatMap(_.locals) + override def globals: Set[BVar] = index.globals ++ memories.flatMap(_.globals) + override def params: Set[BVar] = index.params ++ memories.flatMap(_.params) override def loads: Set[BExpr] = index.loads } diff --git a/src/main/scala/boogie/BVisitor.scala b/src/main/scala/boogie/BVisitor.scala index 77f8ae274..cc8dad009 100644 --- a/src/main/scala/boogie/BVisitor.scala +++ b/src/main/scala/boogie/BVisitor.scala @@ -108,25 +108,30 @@ class RemoveOld(resolveSpec: ResolveSpec) extends SpecResolutionVisitor { override def visitArrayAccess(node: ArrayAccess): BMemoryLoad = resolveSpec.visitArrayAccess(node) } -class ResolveSpecL(resolveSpec: ResolveSpec) extends SpecResolutionVisitor { +class ResolveSpecL(regionInjector: Option[RegionInjector]) extends SpecResolutionVisitor { + private val mem_in = BMapVar("mem$in", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter) override def visitSpecGlobal(node: SpecGlobal): BMemoryLoad = { - BMemoryLoad( - BMapVar("memory", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter), - node.toAddrVar, - Endian.LittleEndian, - node.size - ) + val memory = if (regionInjector.isDefined) { + regionInjector.get.getMergedRegion(node.address) match { + case Some(region) => BMapVar(s"${region.name}$$in", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter) + case None => mem_in + } + } else { + mem_in + } + BMemoryLoad(memory, node.toAddrVar, Endian.LittleEndian, node.size) } - override def visitSpecGamma(node: SpecGamma): GammaLoad = resolveSpec.visitSpecGamma(node) - override def visitArrayAccess(node: ArrayAccess): BMemoryLoad = { - BMemoryLoad( - BMapVar("memory", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter), - node.toAddrVar, - Endian.LittleEndian, - node.global.size - ) + val memory = if (regionInjector.isDefined) { + 
regionInjector.get.getMergedRegion(node.address) match { + case Some(region) => BMapVar(s"${region.name}$$in", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter) + case None => mem_in + } + } else { + mem_in + } + BMemoryLoad(memory, node.toAddrVar, Endian.LittleEndian, node.size) } } diff --git a/src/main/scala/ir/Expr.scala b/src/main/scala/ir/Expr.scala index 43fde843b..c981061b8 100644 --- a/src/main/scala/ir/Expr.scala +++ b/src/main/scala/ir/Expr.scala @@ -5,21 +5,9 @@ import scala.collection.mutable sealed trait Expr { def toBoogie: BExpr - def toGamma: BExpr = { - val gammaVars: Set[BExpr] = gammas.map(_.toGamma) - if (gammaVars.isEmpty) { - TrueBLiteral - } else if (gammaVars.size == 1) { - gammaVars.head - } else { - gammaVars.tail.foldLeft(gammaVars.head) { (join: BExpr, next: BExpr) => - BinaryBExpr(BoolAND, next, join) - } - } - } def loads: Set[MemoryLoad] = Set() def getType: IRType - def gammas: Set[Expr] = Set() + def gammas: Set[Variable] = Set() // variables not including those inside a load's index def variables: Set[Variable] = Set() def acceptVisit(visitor: Visitor): Expr = throw new Exception("visitor " + visitor + " unimplemented for: " + this) } @@ -61,7 +49,7 @@ case class IntLiteral(value: BigInt) extends Literal { */ case class Extract(end: Int, start: Int, body: Expr) extends Expr { override def toBoogie: BExpr = BVExtract(end, start, body.toBoogie) - override def gammas: Set[Expr] = body.gammas + override def gammas: Set[Variable] = body.gammas override def variables: Set[Variable] = body.variables override def getType: BitVecType = BitVecType(end - start) override def toString: String = s"$body[$end:$start]" @@ -71,7 +59,7 @@ case class Extract(end: Int, start: Int, body: Expr) extends Expr { case class Repeat(repeats: Int, body: Expr) extends Expr { override def toBoogie: BExpr = BVRepeat(repeats, body.toBoogie) - override def gammas: Set[Expr] = body.gammas + override def gammas: Set[Variable] = body.gammas override def variables: Set[Variable] = body.variables override def getType: BitVecType = BitVecType(bodySize * repeats) private def bodySize: Int = body.getType match { @@ -85,7 +73,7 @@ case class Repeat(repeats: Int, body: Expr) extends Expr { case class ZeroExtend(extension: Int, body: Expr) extends Expr { override def toBoogie: BExpr = BVZeroExtend(extension, body.toBoogie) - override def gammas: Set[Expr] = body.gammas + override def gammas: Set[Variable] = body.gammas override def variables: Set[Variable] = body.variables override def getType: BitVecType = BitVecType(bodySize + extension) private def bodySize: Int = body.getType match { @@ -99,7 +87,7 @@ case class ZeroExtend(extension: Int, body: Expr) extends Expr { case class SignExtend(extension: Int, body: Expr) extends Expr { override def toBoogie: BExpr = BVSignExtend(extension, body.toBoogie) - override def gammas: Set[Expr] = body.gammas + override def gammas: Set[Variable] = body.gammas override def variables: Set[Variable] = body.variables override def getType: BitVecType = BitVecType(bodySize + extension) private def bodySize: Int = body.getType match { @@ -113,7 +101,7 @@ case class SignExtend(extension: Int, body: Expr) extends Expr { case class UnaryExpr(op: UnOp, arg: Expr) extends Expr { override def toBoogie: BExpr = UnaryBExpr(op, arg.toBoogie) - override def gammas: Set[Expr] = arg.gammas + override def gammas: Set[Variable] = arg.gammas override def variables: Set[Variable] = arg.variables override def loads: Set[MemoryLoad] = arg.loads override def getType: IRType = 
(op, arg.getType) match { @@ -162,7 +150,7 @@ case object BVNEG extends BVUnOp("neg") case class BinaryExpr(op: BinOp, arg1: Expr, arg2: Expr) extends Expr { override def toBoogie: BExpr = BinaryBExpr(op, arg1.toBoogie, arg2.toBoogie) - override def gammas: Set[Expr] = arg1.gammas ++ arg2.gammas + override def gammas: Set[Variable] = arg1.gammas ++ arg2.gammas override def variables: Set[Variable] = arg1.variables ++ arg2.variables override def loads: Set[MemoryLoad] = arg1.loads ++ arg2.loads override def getType: IRType = (op, arg1.getType, arg2.getType) match { @@ -306,14 +294,14 @@ enum Endian { case class MemoryLoad(mem: Memory, index: Expr, endian: Endian, size: Int) extends Expr { override def toBoogie: BMemoryLoad = BMemoryLoad(mem.toBoogie, index.toBoogie, endian, size) - override def toGamma: BExpr = mem match { + def toGamma(LArgs: List[BMapVar]): BExpr = mem match { case m: StackMemory => GammaLoad(m.toGamma, index.toBoogie, size, size / m.valueSize) case m: SharedMemory => - BinaryBExpr(BoolOR, GammaLoad(m.toGamma, index.toBoogie, size, size / m.valueSize), L(m.toBoogie, index.toBoogie)) + BinaryBExpr(BoolOR, GammaLoad(m.toGamma, index.toBoogie, size, size / m.valueSize), L(LArgs, index.toBoogie)) } override def variables: Set[Variable] = index.variables - override def gammas: Set[Expr] = Set(this) + override def gammas: Set[Variable] = Set() override def loads: Set[MemoryLoad] = Set(this) override def getType: IRType = BitVecType(size) override def toString: String = s"MemoryLoad($mem, $index, $endian, $size)" @@ -324,7 +312,6 @@ case class UninterpretedFunction(name: String, params: Seq[Expr], returnType: IR override def getType: IRType = returnType override def toBoogie: BFunctionCall = BFunctionCall(name, params.map(_.toBoogie).toList, returnType.toBoogie, true) override def acceptVisit(visitor: Visitor): Expr = visitor.visitUninterpretedFunction(this) - override def gammas: Set[Expr] = params.flatMap(_.gammas).toSet override def variables: Set[Variable] = params.flatMap(_.variables).toSet } @@ -339,10 +326,9 @@ sealed trait Variable extends Expr { override def getType: IRType = irType override def variables: Set[Variable] = Set(this) - override def gammas: Set[Expr] = Set(this) + override def gammas: Set[Variable] = Set(this) override def toBoogie: BVar - // placeholder definition not actually used - override def toGamma: BVar = BVariable(s"$name", irType.toBoogie, Scope.Global) + def toGamma: BVar override def toString: String = s"Variable($name, $irType)" diff --git a/src/main/scala/ir/Program.scala b/src/main/scala/ir/Program.scala index 49158d986..c422196d3 100644 --- a/src/main/scala/ir/Program.scala +++ b/src/main/scala/ir/Program.scala @@ -13,7 +13,7 @@ class Program(var procedures: ArrayBuffer[Procedure], val threads: ArrayBuffer[ProgramThread] = ArrayBuffer() - val usedMemory = mutable.TreeMap[BigInt, MemorySection]() + val usedMemory: mutable.Map[BigInt, MemorySection] = mutable.TreeMap() override def toString(): String = { serialiseIL(this) @@ -201,7 +201,7 @@ class Procedure private ( def returnBlock_=(value: Block): Unit = { if (!returnBlock.contains(value)) { - _returnBlock.foreach(removeBlocks(_)) + _returnBlock.foreach(removeBlocks) _returnBlock = Some(addBlocks(value)) } } @@ -210,7 +210,7 @@ class Procedure private ( def entryBlock_=(value: Block): Unit = { if (!entryBlock.contains(value)) { - _entryBlock.foreach(removeBlocks(_)) + _entryBlock.foreach(removeBlocks) _entryBlock = Some(addBlocks(value)) } } @@ -337,8 +337,6 @@ class Parameter(var name: 
String, var size: Int, var value: Register) { def toGamma: BVariable = BParam(s"Gamma_$name", BoolBType) } - - class Block private ( val label: String, val address: Option[BigInt], diff --git a/src/main/scala/translating/IRToBoogie.scala b/src/main/scala/translating/IRToBoogie.scala index 44c0c503b..e9ed8a4e6 100644 --- a/src/main/scala/translating/IRToBoogie.scala +++ b/src/main/scala/translating/IRToBoogie.scala @@ -15,7 +15,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti private val controls = spec.controls private val controlled = spec.controlled private val resolveSpec = ResolveSpec(regionInjector) - private val resolveSpecL = ResolveSpecL(resolveSpec) + private val resolveSpecL = ResolveSpecL(regionInjector) private val resolveOld = ResolveOld(resolveSpec) private val removeOld = RemoveOld(resolveSpec) private val relies = spec.relies.map(resolveSpec.visitBExpr) @@ -25,7 +25,6 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti private val guaranteeRegions = guarantees.keys.map(g => g -> g.globals).toMap private val guaranteesParam = spec.guarantees.map(ResolveSpecParam.visitBExpr) private val guaranteesReflexive = spec.guarantees.map(removeOld.visitBExpr) - private val LPreds = spec.LPreds.map((k, v) => k -> resolveSpecL.visitBExpr(v)) private val requires = spec.subroutines.map(s => s.name -> s.requires.map(resolveSpec.visitBExpr)).toMap private val requiresDirect = spec.subroutines.map(s => s.name -> s.requiresDirect).toMap private val ensures = spec.subroutines.map(s => s.name -> s.ensures.map(resolveSpec.visitBExpr)).toMap @@ -37,7 +36,10 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti private val mem = BMapVar("mem", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global) private val Gamma_mem = BMapVar("Gamma_mem", MapBType(BitVecBType(64), BoolBType), Scope.Global) - private val memoriesGammas = if (regionInjector.isDefined) { + private val LPreds = spec.LPreds.map((k, v) => k -> resolveSpecL.visitBExpr(v)) + private val LArgs = lArgs + + private val memoriesToGamma = if (regionInjector.isDefined) { regionInjector.get.mergedRegions.values.map { region => val memory = BMapVar(region.name, MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global) val gamma = BMapVar(s"Gamma_${region.name}", MapBType(BitVecBType(64), BoolBType), Scope.Global) @@ -47,7 +49,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti Map(mem -> Gamma_mem) } - private val memories: Set[BVar] = memoriesGammas.flatMap((k, v) => Set(k, v)).toSet + private val memoriesAndGammas: Set[BVar] = memoriesToGamma.flatMap((k, v) => Set(k, v)).toSet private val mem_in = BMapVar("mem$in", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Parameter) private val Gamma_mem_in = BMapVar("Gamma_mem$in", MapBType(BitVecBType(64), BoolBType), Scope.Parameter) @@ -66,6 +68,19 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti BVariable("Gamma_R31", BoolBType, Scope.Global) ) + def lArgs: List[BMapVar] = { + if (regionInjector.isDefined) { + spec.LPreds.values.flatMap(_.specGlobals).toSet.map { g => + regionInjector.get.getMergedRegion(g.address) match { + case Some(region) => BMapVar(s"${region.name}", MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global) + case None => mem + } + }.toList.sorted + } else { + List(mem) + } + } + def translate: BProgram = { val readOnlySections = program.usedMemory.values.filter(_.readOnly) val readOnlyMemory = 
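// Aside (illustrative sketch): lArgs above picks, for each global mentioned in an
// L-predicate, the memory map that holds it (its merged region when the region injector
// found one, otherwise the monolithic mem), then deduplicates and sorts so the generated
// L function gets a stable parameter order. The same idea with plain strings standing in
// for BMapVar; the names below are made up for the example:
def lArgNamesSketch(lPredAddresses: Set[BigInt],
                    mergedRegionName: BigInt => Option[String]): List[String] =
  lPredAddresses.map(a => mergedRegionName(a).getOrElse("mem")).toList.sorted
// lArgNamesSketch(Set(BigInt(0x2000), BigInt(0x3000)),
//                 Map(BigInt(0x2000) -> "mem_1").get) == List("mem", "mem_1")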
memoryToCondition(readOnlySections) @@ -83,7 +98,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti val guaranteeReflexive = BProcedure( name = "guarantee_reflexive", - modifies = memories, + modifies = memoriesAndGammas, body = guaranteesReflexive.map(g => BAssert(g)), attributes = List(externAttr) ) @@ -128,13 +143,13 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti relies } else { // default case where no rely is given - rely on no external changes - memories.toList.sorted.map(m => BinaryBExpr(BVEQ, m, Old(m))) + memoriesAndGammas.toList.sorted.map(m => BinaryBExpr(BVEQ, m, Old(m))) } val relyEnsures = if (relies.nonEmpty) { val i = BVariable("i", BitVecBType(64), Scope.Local) - val memImpliesGamma = memoriesGammas.keys.toList.sorted.map { memory => - val gamma = memoriesGammas(memory) + val memImpliesGamma = memoriesToGamma.keys.toList.sorted.map { memory => + val gamma = memoriesToGamma(memory) ForAll( List(i), BinaryBExpr(BoolIMPLIES, @@ -147,8 +162,8 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti } else { reliesUsed } - val relyProc = BProcedure("rely", ensures = relyEnsures, freeEnsures = readOnlyMemory, modifies = memories, attributes = List(externAttr)) - val relyTransitive = BProcedure("rely_transitive", ensures = reliesUsed, modifies = memories, body = List(BProcedureCall("rely"), BProcedureCall("rely")), + val relyProc = BProcedure("rely", ensures = relyEnsures, freeEnsures = readOnlyMemory, modifies = memoriesAndGammas, attributes = List(externAttr)) + val relyTransitive = BProcedure("rely_transitive", ensures = reliesUsed, modifies = memoriesAndGammas, body = List(BProcedureCall("rely"), BProcedureCall("rely")), attributes = List(externAttr)) val relyReflexive = BProcedure("rely_reflexive", body = reliesReflexive.map(r => BAssert(r)), attributes = List(externAttr)) List(relyProc, relyTransitive, relyReflexive) @@ -369,8 +384,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti BFunction(g.fnName, in, out, Some(body), List(externAttr)) case l: LOp => - val memoryVar = BParam("memory", l.memoryType) - val indexVar = BParam("index", l.indexType) + val indexVar: BVar = BParam("index", l.indexType) val body: BExpr = LPreds.keys.foldLeft(FalseBLiteral) { (ite: BExpr, next: SpecGlobal) => val guard = next.arraySize match { case Some(size: Int) => @@ -390,7 +404,8 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti } */ IfThenElse(guard, LPred, ite) } - BFunction("L", List(memoryVar, indexVar), BParam(BoolBType), Some(body), List(externAttr)) + val params = (body.params - indexVar).toList.sorted + BFunction("L", params :+ indexVar, BParam(BoolBType), Some(body), List(externAttr)) case b: ByteExtract => val valueVar = BParam("value", BitVecBType(b.valueSize)) val offsetVar = BParam("offset", BitVecBType(b.offsetSize)) @@ -717,7 +732,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti val lhs = m.mem.toBoogie val rhs = BMemoryStore(m.mem.toBoogie, m.index.toBoogie, m.value.toBoogie, m.endian, m.size) val lhsGamma = m.mem.toGamma - val rhsGamma = GammaStore(m.mem.toGamma, m.index.toBoogie, m.value.toGamma, m.size, m.size / m.mem.valueSize) + val rhsGamma = GammaStore(m.mem.toGamma, m.index.toBoogie, exprToGamma(m.value), m.size, m.size / m.mem.valueSize) val store = AssignCmd(List(lhs, lhsGamma), List(rhs, rhsGamma)) val stateSplit = s match { case MemoryAssign(_, _, _, _, _, 
Some(label)) => List(captureStateStatement(s"$label")) @@ -729,7 +744,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti List(store) ++ stateSplit case _: SharedMemory => val rely = BProcedureCall("rely") - val gammaValueCheck = BAssert(BinaryBExpr(BoolIMPLIES, L(lhs, rhs.index), m.value.toGamma)) + val gammaValueCheck = BAssert(BinaryBExpr(BoolIMPLIES, L(LArgs, rhs.index), exprToGamma(m.value))) val oldVars = guarantees.keys.view.toSet.flatMap { g => if (guaranteeRegions(g).contains(lhs)) { guarantees(g) @@ -749,34 +764,25 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti AssignCmd(g.toOldVar, BMemoryLoad(memory, g.toAddrVar, Endian.LittleEndian, g.size)) } val oldGammaAssigns = controlled.map { g => - val (memory, gamma) = if (regionInjector.isDefined) { + val gamma = if (regionInjector.isDefined) { regionInjector.get.getMergedRegion(g.address) match { case Some(region) => - (BMapVar(region.name, MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global), - BMapVar(s"Gamma_${region.name}", MapBType(BitVecBType(64), BoolBType), Scope.Global)) + BMapVar(s"Gamma_${region.name}", MapBType(BitVecBType(64), BoolBType), Scope.Global) case None => - (mem, Gamma_mem) + Gamma_mem } } else { - (mem, Gamma_mem) + Gamma_mem } AssignCmd( g.toOldGamma, - BinaryBExpr(BoolOR, GammaLoad(gamma, g.toAddrVar, g.size, g.size / m.mem.valueSize), L(memory, g.toAddrVar)) + BinaryBExpr(BoolOR, GammaLoad(gamma, g.toAddrVar, g.size, g.size / m.mem.valueSize), L(LArgs, g.toAddrVar)) ) } val secureUpdate = for (c <- controls.keys.view.toSeq.sorted) yield { val addrCheck = BinaryBExpr(BVEQ, rhs.index, c.toAddrVar) val checks = controls(c).toList.sorted.map { v => - val memory = if (regionInjector.isDefined) { - regionInjector.get.getMergedRegion(v.address) match { - case Some(region) => BMapVar(region.name, MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global) - case None => mem - } - } else { - mem - } - BinaryBExpr(BoolIMPLIES, L(memory, v.toAddrVar), v.toOldGamma) + BinaryBExpr(BoolIMPLIES, L(LArgs, v.toAddrVar), v.toOldGamma) } val checksAnd = if (checks.size > 1) { checks.tail.foldLeft(checks.head)((next: BExpr, ands: BExpr) => BinaryBExpr(BoolAND, next, ands)) @@ -794,7 +800,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti val lhs = l.lhs.toBoogie val rhs = l.rhs.toBoogie val lhsGamma = l.lhs.toGamma - val rhsGamma = l.rhs.toGamma + val rhsGamma = exprToGamma(l.rhs) val assign = AssignCmd(List(lhs, lhsGamma), List(rhs, rhsGamma)) val loads = l.rhs.loads if (loads.size > 1) { @@ -815,4 +821,17 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti val body = a.body.toBoogie List(BAssume(body, a.comment)) } + + def exprToGamma(e: Expr): BExpr = { + val gammaVars: Set[BExpr] = e.gammas.map(_.toGamma) ++ e.loads.map(_.toGamma(LArgs)) + if (gammaVars.isEmpty) { + TrueBLiteral + } else if (gammaVars.size == 1) { + gammaVars.head + } else { + gammaVars.tail.foldLeft(gammaVars.head) { (join: BExpr, next: BExpr) => + BinaryBExpr(BoolAND, next, join) + } + } + } } diff --git a/src/main/scala/translating/SpecificationLoader.scala b/src/main/scala/translating/SpecificationLoader.scala index 1d9b82336..d4f96599f 100644 --- a/src/main/scala/translating/SpecificationLoader.scala +++ b/src/main/scala/translating/SpecificationLoader.scala @@ -276,8 +276,8 @@ case class SpecificationLoader(symbols: Set[SpecGlobal], program: Program) { def visitBoogieTypeName(ctx: BoogieTypeNameContext): BType = { 
ctx match { case b: BvBTypeContext => BitVecBType(Integer.parseInt(b.BVSIZE.getText.stripPrefix("bv"))) - case c: IntBTypeContext => IntBType - case c: BoolBTypeContext => BoolBType + case _: IntBTypeContext => IntBType + case _: BoolBTypeContext => BoolBType case m: MapBTypeContext => MapBType(visitBoogieTypeName(m.keyT), visitBoogieTypeName(m.valT)) } } @@ -317,26 +317,22 @@ case class SpecificationLoader(symbols: Set[SpecGlobal], program: Program) { } def visitId(ctx: IdContext, nameToGlobals: Map[String, SpecGlobal], params: Map[String, Parameter] = Map()): BExpr = { - val id = ctx.getText - id match { - case id if id.startsWith("Gamma_R") => { + ctx.getText match { + case id if id.startsWith("Gamma_R") => BVariable(id, BoolBType, Scope.Global) - } - case id if (id.startsWith("Gamma_")) => { - val gamma_id = id.stripPrefix("Gamma_") - params.get(gamma_id) match { - case Some(p: Parameter) => p.value.toGamma - case None => - nameToGlobals.get(gamma_id) match { - case Some(g: SpecGlobal) => SpecGamma(g) - case None => throw new Exception(s"unresolvable reference to '$id' in specification") + case id if id.startsWith("Gamma_") => + val gamma_id = id.stripPrefix("Gamma_") + params.get(gamma_id) match { + case Some(p: Parameter) => p.value.toGamma + case None => + nameToGlobals.get(gamma_id) match { + case Some(g: SpecGlobal) => SpecGamma(g) + case None => throw new Exception(s"unresolvable reference to '$id' in specification") + } } - } - } - case id if id.startsWith("R") => { + case id if id.startsWith("R") => BVariable(id, BitVecBType(64), Scope.Global) - } - case id => { + case id => params.get(id) match { case Some(p: Parameter) => val registerSize = p.value.size @@ -354,8 +350,7 @@ case class SpecificationLoader(symbols: Set[SpecGlobal], program: Program) { case None => throw new Exception(s"unresolvable reference to '$id' in specification") } } - } - } + } } def visitMulDivModOp(ctx: MulDivModOpContext): BVBinOp = ctx.getText match { diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index b1e372ec0..6b7b32d14 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -239,6 +239,7 @@ object IRTransform { assert(invariant.singleCallBlockEnd(ctx.program)) } + /* def generateProcedureSummaries( ctx: IRContext, IRProgram: Program, @@ -268,6 +269,7 @@ object IRTransform { modified } + */ } @@ -559,10 +561,12 @@ object RunUtils { result.reachingDefs, ctx.program ) + /* Logger.debug("[!] Generating Procedure Summaries") if (config.summariseProcedures) { IRTransform.generateProcedureSummaries(ctx, ctx.program, result.constPropResult, result.varDepsSummaries) } + */ if (modified) { iteration += 1 Logger.debug(s"[!] 
Analysing again (iter $iteration)") From 74e4810586291d65e7b8642ac5bc72845e191429 Mon Sep 17 00:00:00 2001 From: l-kent Date: Thu, 24 Oct 2024 09:50:41 +1000 Subject: [PATCH 092/104] prevent stack being added to rely --- src/main/scala/translating/IRToBoogie.scala | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/main/scala/translating/IRToBoogie.scala b/src/main/scala/translating/IRToBoogie.scala index e9ed8a4e6..bd68813e7 100644 --- a/src/main/scala/translating/IRToBoogie.scala +++ b/src/main/scala/translating/IRToBoogie.scala @@ -1,5 +1,5 @@ package translating -import analysis.RegionInjector +import analysis.{RegionInjector, DataRegion, HeapRegion, MergedRegion} import ir.{BoolOR, *} import boogie.* import specification.* @@ -40,10 +40,11 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti private val LArgs = lArgs private val memoriesToGamma = if (regionInjector.isDefined) { - regionInjector.get.mergedRegions.values.map { region => - val memory = BMapVar(region.name, MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global) - val gamma = BMapVar(s"Gamma_${region.name}", MapBType(BitVecBType(64), BoolBType), Scope.Global) - memory -> gamma + regionInjector.get.mergedRegions.collect { + case (_: DataRegion | _: HeapRegion, region: MergedRegion) => + val memory = BMapVar(region.name, MapBType(BitVecBType(64), BitVecBType(8)), Scope.Global) + val gamma = BMapVar(s"Gamma_${region.name}", MapBType(BitVecBType(64), BoolBType), Scope.Global) + memory -> gamma }.toMap } else { Map(mem -> Gamma_mem) From 53e5ad9666e2a6ade9623f6ce38aa536b5779845 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Fri, 25 Oct 2024 11:49:13 +1000 Subject: [PATCH 093/104] Cleanup --- build.sbt | 2 - src/main/scala/analysis/AVLTree.scala | 179 ---- src/main/scala/analysis/Analysis.scala | 3 - src/main/scala/analysis/BACKUPLAttice | 834 ------------------ .../scala/analysis/GlobalRegionAnalysis.scala | 7 - .../InterprocSteensgaardAnalysis.scala | 300 ++----- .../scala/analysis/IrreducibleLoops.scala | 2 +- src/main/scala/analysis/LAST_VSA_BACKUP.scala | 276 ------ .../scala/analysis/MemoryRegionAnalysis.scala | 8 +- src/main/scala/analysis/UtilMethods.scala | 4 +- src/main/scala/ir/Expr.scala | 3 +- .../scala/ir/transforms/SplitThreads.scala | 2 +- src/main/scala/util/RunUtils.scala | 18 +- 13 files changed, 84 insertions(+), 1554 deletions(-) delete mode 100644 src/main/scala/analysis/AVLTree.scala delete mode 100644 src/main/scala/analysis/BACKUPLAttice delete mode 100644 src/main/scala/analysis/LAST_VSA_BACKUP.scala diff --git a/build.sbt b/build.sbt index e69722eff..f1e6339ff 100644 --- a/build.sbt +++ b/build.sbt @@ -10,7 +10,6 @@ val scalactic = "org.scalactic" %% "scalactic" % "3.2.10" val antlrRuntime = "org.antlr" % "antlr4-runtime" % "4.9.3" val sourceCode = "com.lihaoyi" %% "sourcecode" % "0.3.0" val mainArgs = "com.lihaoyi" %% "mainargs" % "0.5.1" -val parralelCollections = "org.scala-lang.modules" %% "scala-parallel-collections" % "1.0.4" lazy val root = project .in(file(".")) @@ -27,7 +26,6 @@ lazy val root = project libraryDependencies += scalaTests, libraryDependencies += sourceCode, libraryDependencies += mainArgs, - libraryDependencies += parralelCollections, libraryDependencies += "org.scalameta" %% "munit" % "0.7.29" % Test ) diff --git a/src/main/scala/analysis/AVLTree.scala b/src/main/scala/analysis/AVLTree.scala deleted file mode 100644 index ef65475a0..000000000 --- a/src/main/scala/analysis/AVLTree.scala +++ /dev/null @@ -1,179 +0,0 
@@ -package analysis - -/** - * Node of the AVL tree. - * @param key - * @param value - * @param height - * @param left - * @param right - * @tparam K key type - * @tparam V value type - */ -case class Node[K, V](var key: K, var value: V, var height: Int, var left: Option[Node[K, V]], var right: Option[Node[K, V]]) - -/** - * AVL tree implementation. Ref. https://cs.indstate.edu/~kbalaraman/anew.pdf - * @param ordering - * @tparam K key type - * @tparam V value type - */ -class AVLTree[K, V](ordering: Ordering[K]) { - private var root: Option[Node[K, V]] = None - - // Get the height of the node - private def height(node: Option[Node[K, V]]): Int = node.map(_.height).getOrElse(0) - - // Rotate right - private def rotateRight(y: Node[K, V]): Node[K, V] = { - val x = y.left.get - val T2 = x.right - x.right = Some(y) - y.left = T2 - y.height = Math.max(height(y.left), height(y.right)) + 1 - x.height = Math.max(height(x.left), height(x.right)) + 1 - x - } - - // Rotate left - private def rotateLeft(x: Node[K, V]): Node[K, V] = { - val y = x.right.get - val T2 = y.left - y.left = Some(x) - x.right = T2 - x.height = Math.max(height(x.left), height(x.right)) + 1 - y.height = Math.max(height(y.left), height(y.right)) + 1 - y - } - - // Get balance factor of node N - private def getBalance(node: Option[Node[K, V]]): Int = node.map(n => height(n.left) - height(n.right)).getOrElse(0) - - // Insert a key-value pair - def insert(key: K, value: V): Unit = { - def insertNode(node: Option[Node[K, V]], key: K, value: V): Node[K, V] = { - if (node.isEmpty) return Node(key, value, 1, None, None) - - val n = node.get - - if (ordering.lt(key, n.key)) n.left = Some(insertNode(n.left, key, value)) - else if (ordering.gt(key, n.key)) n.right = Some(insertNode(n.right, key, value)) - else { - n.value = value - return n - } - - n.height = 1 + Math.max(height(n.left), height(n.right)) - val balance = getBalance(Some(n)) - - // Left Left Case - if (balance > 1 && ordering.lt(key, n.left.get.key)) return rotateRight(n) - - // Right Right Case - if (balance < -1 && ordering.gt(key, n.right.get.key)) return rotateLeft(n) - - // Left Right Case - if (balance > 1 && ordering.gt(key, n.left.get.key)) { - n.left = Some(rotateLeft(n.left.get)) - return rotateRight(n) - } - - // Right Left Case - if (balance < -1 && ordering.lt(key, n.right.get.key)) { - n.right = Some(rotateRight(n.right.get)) - return rotateLeft(n) - } - - n - } - - root = Some(insertNode(root, key, value)) - } - - // Search for a value by key - def search(key: K): Option[V] = { - def searchNode(node: Option[Node[K, V]], key: K): Option[V] = { - if (node.isEmpty) return None - - val n = node.get - - if (ordering.equiv(key, n.key)) Some(n.value) - else if (ordering.lt(key, n.key)) searchNode(n.left, key) - else searchNode(n.right, key) - } - - searchNode(root, key) - } - - // Delete a key-value pair - def delete(key: K): Unit = { - def minValueNode(node: Node[K, V]): Node[K, V] = { - var current = node - while (current.left.isDefined) current = current.left.get - current - } - - def deleteNode(node: Option[Node[K, V]], key: K): Option[Node[K, V]] = { - if (node.isEmpty) return None - - val n = node.get - - if (ordering.lt(key, n.key)) n.left = deleteNode(n.left, key) - else if (ordering.gt(key, n.key)) n.right = deleteNode(n.right, key) - else { - if (n.left.isEmpty || n.right.isEmpty) { - val temp = if (n.left.isDefined) n.left else n.right - if (temp.isEmpty) return None - else return temp - } else { - val temp = minValueNode(n.right.get) - n.key = temp.key 
- n.value = temp.value - n.right = deleteNode(n.right, temp.key) - } - } - - n.height = Math.max(height(n.left), height(n.right)) + 1 - val balance = getBalance(Some(n)) - - // Left Left Case - if (balance > 1 && getBalance(n.left) >= 0) return Some(rotateRight(n)) - - // Left Right Case - if (balance > 1 && getBalance(n.left) < 0) { - n.left = Some(rotateLeft(n.left.get)) - return Some(rotateRight(n)) - } - - // Right Right Case - if (balance < -1 && getBalance(n.right) <= 0) return Some(rotateLeft(n)) - - // Right Left Case - if (balance < -1 && getBalance(n.right) > 0) { - n.right = Some(rotateRight(n.right.get)) - return Some(rotateLeft(n)) - } - - Some(n) - } - - root = deleteNode(root, key) - } -} - -// Example usage -object AVLTreeExample extends App { - val avl = new AVLTree[Int, String](Ordering.Int) - avl.insert(10, "Value10") - avl.insert(20, "Value20") - avl.insert(30, "Value30") - avl.insert(40, "Value40") - avl.insert(50, "Value50") - avl.insert(25, "Value25") - - println(avl.search(25)) // Some(Value25) - println(avl.search(100)) // None - - avl.delete(25) - println(avl.search(25)) // None -} diff --git a/src/main/scala/analysis/Analysis.scala b/src/main/scala/analysis/Analysis.scala index b781ee1d5..035a99e11 100644 --- a/src/main/scala/analysis/Analysis.scala +++ b/src/main/scala/analysis/Analysis.scala @@ -79,9 +79,6 @@ trait ConstantPropagation(val program: Program, val assumeR31: Boolean) { var m = s n match case r: Command => - if (assumeR31 && IRWalk.procedure(n).entryBlock.isDefined && IRWalk.firstInBlock(program.mainProcedure.entryBlock.get) == n) { - m = m + (Register("R31", 64) -> eval(BitVecLiteral(Long.MaxValue, 64), m)) - } r match // assignments case la: Assign => diff --git a/src/main/scala/analysis/BACKUPLAttice b/src/main/scala/analysis/BACKUPLAttice deleted file mode 100644 index b68fae9c1..000000000 --- a/src/main/scala/analysis/BACKUPLAttice +++ /dev/null @@ -1,834 +0,0 @@ -package analysis - -import ir._ -import analysis.BitVectorEval._ -import util.Logger -import math.pow - -/** Basic lattice - */ -trait Lattice[T]: - - type Element = T - /** The bottom element of this lattice. - */ - val bottom: T - - /** The top element of this lattice. Default: not implemented. - */ - def top: T = ??? - - /** The least upper bound of `x` and `y`. - */ - def lub(x: T, y: T): T - - /** Returns true whenever `x` <= `y`. 
- */ - def leq(x: T, y: T): Boolean = lub(x, y) == y // rarely used, but easy to implement :-) - -//trait StridedInterval[+T] -// -//case class SI[T](s: T, l: T, u: T) extends StridedInterval[T] { -// override def toString = s"SI $s [$l, $u]" -//} -// -//case object SIBottom extends StridedInterval[BitVecLiteral] { -// override def toString = "SIBot" -//} - -///** -// * SI class that represents a strided interval -// * s is the stride -// * l is the lower bound -// * u is the upper bound -// * [l, u] is the interval -// * [l, u] \ s is the set of values -// * 0[l,l] represents the singleton set {l} -// */ -//class StridedIntervalLattice extends Lattice[StridedInterval[BitVecLiteral]] { -// val lowestPossibleValue: BitVecLiteral = BitVecLiteral(0, 64) -// val highestPossibleValue: BitVecLiteral = BitVecLiteral(Long.MaxValue - 1, 64) -// -// override val bottom: StridedInterval[BitVecLiteral] = SIBottom -// override def top: StridedInterval[BitVecLiteral] = SI(BitVecLiteral(1, 64), lowestPossibleValue, highestPossibleValue) -// -// def gamma(x: StridedInterval[BitVecLiteral]): Set[BitVecLiteral] = x match { -// case SIBottom => Set.empty -// case SI(s, l, u) => -// bitVec_interval(l, u, s) -// } -// -// /** S1[L1, U1] join S2[L2, U2] -> gcd(S1, S2)[min(L1, L2), max(U1, U2)] */ -// override def lub(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { -// (x, y) match { -// case (SIBottom, t) => t -// case (t, SIBottom) => t -// case (SI(s1, l1, u1), SI(s2, l2, u2)) => -// SI(bitVec_gcd(s1, s2), bitVec_min(l1, l2), bitVec_max(u1, u2)) -// } -// } -// -// /** S1[L1, U1] meet S2[L2, U2] -> gcd(S1, S2)[max(L1, L2), min(U1, U2)] */ -// def meet(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { -// (x, y) match { -// case (SIBottom, t) => SIBottom -// case (t, SIBottom) => SIBottom -// case (SI(s1, l1, u1), SI(s2, l2, u2)) => -// SI(bitVec_gcd(s1, s2), bitVec_max(l1, l2), bitVec_min(u1, u2)) -// } -// } -// -// /** Addition -// * Addition defined in page 6 Figure 2 of: https://dl.acm.org/doi/pdf/10.1145/1111542.1111560 -// * */ -// def add(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { -// (x, y) match { -// case (SIBottom, t) => t -// case (t, SIBottom) => t -// case (SI(s1, l1, u1), SI(s2, l2, u2)) => -// val lbound = smt_bvadd(l1, l2) -// val ubound = smt_bvadd(u1, u2) -// val s = bitVec_gcd(s1, s2) -// if (smt_bvsle(ubound, highestPossibleValue) == TrueLiteral && smt_bvsge(lbound, lowestPossibleValue) == TrueLiteral) { -// SI(s, lbound, ubound) -// } else { -// throw new IllegalArgumentException(s"Addition overflow: $lbound, $ubound") -// } -// } -// } -// -// /** Unary Minus */ -// def unaryMinus(x: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { -// x match { -// case SIBottom => SIBottom -// case SI(s, l, u) => -// if (smt_bvcomp(l, u) == BitVecLiteral(1, 1) && (smt_bvcomp(l, lowestPossibleValue) == BitVecLiteral(1, 1) && smt_bvcomp(u, lowestPossibleValue) == BitVecLiteral(1, 1))) { -// SI(BitVecLiteral(0, 64), lowestPossibleValue, lowestPossibleValue) -// } else if (smt_bvcomp(l, lowestPossibleValue) == BitVecLiteral(0, 1)) { -// SI(s, smt_bvneg(u), smt_bvneg(l)) -// } -// else { -// SI(BitVecLiteral(1, 64), lowestPossibleValue, highestPossibleValue) -// } -// } -// } -// -// /** Substraction */ -// def sub(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): 
StridedInterval[BitVecLiteral] = { -// add(x, unaryMinus(y)) -// } -// -// /** Widen */ -// def widen(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { -// /* formula for widening: -// given: s1[lb1, ub1] and s2[lb2, ub2] -// then: gcd(s1, s2)[min(lb1, lb2), max(ub1, ub2)] -// where: min(lb1, lb2) = lb1 if lb1 <= lb2 -// and: min(lb1, lb2) = minPossibleValue otherwise -// where: max(ub1, ub2) = ub1 if ub1 >= ub2 -// and: max(ub1, ub2) = maxPossibleValue otherwise -// -// assuming: -// minPossibleValue = lowestPossibleValue -// maxPossibleValue = highestPossibleValue + (lb - 1) mod s -// */ -// (x, y) match { -// case (SIBottom, t) => ??? -// case (t, SIBottom) => ??? -// case (SI(s1, l1, u1), SI(s2, l2, u2)) => -// val s = bitVec_gcd(s1, s2) -// val l = if (smt_bvule(l1, l2) == TrueLiteral) l1 else lowestPossibleValue -// val u = if (smt_bvuge(u1, u2) == TrueLiteral) u1 else smt_bvsmod(smt_bvadd(highestPossibleValue, smt_bvsub(l1, BitVecLiteral(1, 64))), s) -// SI(s, l, u) -// } -// } -// -// /** -// * Calculating strided interval for a list of values using accumulative gcd. -// * @param x the list of values -// * @return the strided interval representing the values in the list -// */ -// def valuesToSI(x: List[BitVecLiteral]): StridedInterval[BitVecLiteral] = { -// if (x.isEmpty) { -// SIBottom -// } else { -// val l = bitVec_min(x) -// val u = bitVec_max(x) -// val initialStride = smt_bvsub(u, l) -// val stride = x.foldLeft(initialStride) { -// case (acc, v) => bitVec_gcd(smt_bvsub(v, l), acc) -// } -// SI(stride, l, u) -// } -// } -//} - - -trait StridedWrappedInterval - -case class SI(s: BigInt, l: BigInt, u: BigInt, w: BigInt) extends StridedWrappedInterval { - if (l == u) { - require(s == 0) - } - override def toString = s"SASI $s [$l, $u] $w" -} - -case object SIBottom extends StridedWrappedInterval { - override def toString = "SASIBot" -} - -// TOP is 1[0^w, 1^w]w -case object SITop extends StridedWrappedInterval { - override def toString = "SASITop" -} - -class SASILattice extends Lattice[StridedWrappedInterval] { - val lowestPossibleValue: BigInt = 0 - val highestPossibleValue: BigInt = Long.MaxValue - 1 - - override val bottom: StridedWrappedInterval = SIBottom - - override def top: StridedWrappedInterval = SITop - -// def gamma(x: StridedWrappedInterval): Set[BitVecLiteral] = x match { -// case SIBottom => Set.empty -// case SI(s, l, u, w) => -// if (s == BitVecLiteral(0, 64)) { // singleton set -// Set(l) -// } else { -// bitVec_interval(l, u, s) -// } -// } - - def isSingleValue(x: StridedWrappedInterval): Boolean = x match { - case SI(s, l, u, w) => s == 0 && l == u - case _ => false - } - - def modularPlus(a: BigInt, b: BigInt, w: BigInt): BigInt = { - (a + b) mod BigInt(2).pow(w.toInt) - } - - def modularMinus(a: BigInt, b: BigInt, w: BigInt): BigInt = { - (a - b) mod BigInt(2).pow(w.toInt) - } - - def modularLEQ(a: BigInt, b: BigInt, x: BigInt, w: BigInt): Boolean = { - modularMinus(a, x, w) <= modularMinus(b, x, w) - } - - def membershipFunction(v: BigInt, r: StridedWrappedInterval): Boolean = { - r match { - case SIBottom => false - case SITop => true - case SI(sr, lb, ub, w) => - modularLEQ(v, ub, lb, w) && (modularMinus(v, lb, w) mod sr) == 0 - } - } - - def cardinalityFunction(r: StridedWrappedInterval, w: BigInt): BigInt = { - r match { - case SIBottom => 0 - case SITop => BigInt(2).pow(w.toInt) - case SI(sr, lb, ub, w) => ((ub - lb + 1) / sr) // TODO: this may need to be a math.floor operation - } - } - - def 
orderingOperator(r: StridedWrappedInterval, t: StridedWrappedInterval): Boolean = { - if (r == SITop && t != SITop) { - false - } else if (r == SIBottom || t == SITop) { - true - } else { - (r, t) match { - case (SI(sr, a, b, w1), SI(st, c, d, w2)) => - if ((a == c) && (b == d) && ((sr mod st) == 0)) { - return true - } - membershipFunction(a, t) && membershipFunction(b, t) && (!membershipFunction(c, r) || !membershipFunction(d, r)) && ((a - c) mod st) == 0 && (sr mod st) == 0 - case _ => false - } - } - } - - /** S1[L1, U1] join S2[L2, U2] -> gcd(S1, S2)[min(L1, L2), max(U1, U2)] */ - override def lub(r: StridedWrappedInterval, t: StridedWrappedInterval): StridedWrappedInterval = { -// (s, t) match { -// case (SIBottom, t) => t -// case (t, SIBottom) => t -// case (SI(a, b, u1, w1), SI(s2, c, d, w2)) => -// var u: BigInt = 0 -// var l: BigInt = 0 -// if (isSingleValue(s) && isSingleValue(t)) { -// val si1_card = WCardMod() -// val si2_card = WCardMod() -// if (si1_card <= si2_card) { -// l = a -// u = d -// } else { -// l = c -// u = b -// } -// -// SI(u - l, l, u, ) -// } -// } - - (r, t) match { - case (SI(sr, a, b, w1), SI(st, c, d, w2)) => - assert(w1 == w2) - val w = w1 // TODO: should this be the largest? - if (orderingOperator(r, t)) { - return t - } - if (orderingOperator(t, r)) { - return r - } - if (membershipFunction(a, t) && membershipFunction(b, t) && membershipFunction(c, r) && membershipFunction(d, r)) { - return SITop - } - if (membershipFunction(c, r) && membershipFunction(b, t) && !membershipFunction(a, t) && !membershipFunction(d, r)) { - return SI(sr.gcd(st).gcd(modularMinus(d, a, w)), a, d, w) - } - if (membershipFunction(a, t) && membershipFunction(d, r) && !membershipFunction(c, r) && !membershipFunction(b, t)) { - return SI(sr.gcd(st).gcd(modularMinus(b, c, w)), c, b, w) - } - val sad = SI(sr.gcd(st).gcd(modularMinus(d, a, w)), a, d, w) - val scb = SI(sr.gcd(st).gcd(modularMinus(b, c, w)), c, b, w) - if (!membershipFunction(a, t) && !membershipFunction(d, r) && !membershipFunction(c, r) && !membershipFunction(b, t) && cardinalityFunction(sad, w) <= cardinalityFunction(scb, w)) { - return sad - } - return scb - case _ => ??? - } - } - - def singletonSI(v: BigInt, w: BigInt): StridedWrappedInterval = { - SI(0, v, v, w) - } - - /** - * s + t = - * BOT if s = BOT or t = BOT - * gcd(s, t)(|a +w c, b +w d|) if s = (|a, b|), t = (|c, d|) and #s + #t <= 2^w - * @param s - * @param t - * @return - */ - def add(s: StridedWrappedInterval, t: StridedWrappedInterval): StridedWrappedInterval = { - (s, t) match { - case (SIBottom, _) => SIBottom // TODO: is this correct? - case (_, SIBottom) => SIBottom // TODO: is this correct? - case (SI(ss, a, b, w1), SI(st, c, d, w2)) if (cardinalityFunction(s, w1) + cardinalityFunction(t, w2)) <= BigInt(2).pow(w1.toInt) => - assert(w1 == w2) - return SI(ss.gcd(st), modularPlus(a, c, w1), modularPlus(b, d, w1), w1) - case _ => SITop - } - } - - def add(s: StridedWrappedInterval, t: BigInt, w: BigInt): StridedWrappedInterval = { - (s, t) match { - case (SIBottom, _) => SIBottom // TODO: is this correct? 
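// Editor's aside (illustrative sketch, not part of this patch): the addition rule stated
// above, gcd(s, t)(|a +w c, b +w d|), in concrete terms for an 8-bit word. 2(|4, 12|)
// denotes {4, 6, 8, 10, 12} and the singleton {3} is 0(|3, 3|); assuming the cardinality
// guard passes, their sum is gcd(2, 0)(|4 +w 3, 12 +w 3|) = 2(|7, 15|), i.e. {7, 9, 11, 13, 15}.
object WrappedAddSketch {
  private def modularPlus(a: BigInt, b: BigInt, w: Int): BigInt = (a + b).mod(BigInt(2).pow(w))

  def main(args: Array[String]): Unit = {
    val w = 8
    val (ss, a, b) = (BigInt(2), BigInt(4), BigInt(12)) // 2(|4, 12|)
    val (st, c, d) = (BigInt(0), BigInt(3), BigInt(3))  // singleton 0(|3, 3|)
    val result = (ss.gcd(st), modularPlus(a, c, w), modularPlus(b, d, w), w)
    println(result) // (2, 7, 15, 8)
  }
}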
- case (SI(ss, a, b, w1), t) => - return add(s, singletonSI(t, w)) - case _ => SITop - } - } - - - - -// /** S1[L1, U1] meet S2[L2, U2] -> gcd(S1, S2)[max(L1, L2), min(U1, U2)] */ -// def meet(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { -// (x, y) match { -// case (SIBottom, t) => SIBottom -// case (t, SIBottom) => SIBottom -// case (SI(s1, l1, u1), SI(s2, l2, u2)) => -// SI(bitVec_gcd(s1, s2), bitVec_max(l1, l2), bitVec_min(u1, u2)) -// } -// } -// -// /** Addition -// * Addition defined in page 6 Figure 2 of: https://dl.acm.org/doi/pdf/10.1145/1111542.1111560 -// * */ -// def add(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { -// (x, y) match { -// case (SIBottom, t) => t -// case (t, SIBottom) => t -// case (SI(s1, l1, u1), SI(s2, l2, u2)) => -// val lbound = smt_bvadd(l1, l2) -// val ubound = smt_bvadd(u1, u2) -// val s = bitVec_gcd(s1, s2) -// if (smt_bvsle(ubound, highestPossibleValue) == TrueLiteral && smt_bvsge(lbound, lowestPossibleValue) == TrueLiteral) { -// SI(s, lbound, ubound) -// } else { -// throw new IllegalArgumentException(s"Addition overflow: $lbound, $ubound") -// } -// } -// } -// -// /** Unary Minus */ -// def unaryMinus(x: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { -// x match { -// case SIBottom => SIBottom -// case SI(s, l, u) => -// if (smt_bvcomp(l, u) == BitVecLiteral(1, 1) && (smt_bvcomp(l, lowestPossibleValue) == BitVecLiteral(1, 1) && smt_bvcomp(u, lowestPossibleValue) == BitVecLiteral(1, 1))) { -// SI(BitVecLiteral(0, 64), lowestPossibleValue, lowestPossibleValue) -// } else if (smt_bvcomp(l, lowestPossibleValue) == BitVecLiteral(0, 1)) { -// SI(s, smt_bvneg(u), smt_bvneg(l)) -// } -// else { -// SI(BitVecLiteral(1, 64), lowestPossibleValue, highestPossibleValue) -// } -// } -// } -// -// /** Substraction */ -// def sub(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { -// add(x, unaryMinus(y)) -// } -// -// /** Widen */ -// def widen(x: StridedInterval[BitVecLiteral], y: StridedInterval[BitVecLiteral]): StridedInterval[BitVecLiteral] = { -// /* formula for widening: -// given: s1[lb1, ub1] and s2[lb2, ub2] -// then: gcd(s1, s2)[min(lb1, lb2), max(ub1, ub2)] -// where: min(lb1, lb2) = lb1 if lb1 <= lb2 -// and: min(lb1, lb2) = minPossibleValue otherwise -// where: max(ub1, ub2) = ub1 if ub1 >= ub2 -// and: max(ub1, ub2) = maxPossibleValue otherwise -// -// assuming: -// minPossibleValue = lowestPossibleValue -// maxPossibleValue = highestPossibleValue + (lb - 1) mod s -// */ -// (x, y) match { -// case (SIBottom, t) => ??? -// case (t, SIBottom) => ??? -// case (SI(s1, l1, u1), SI(s2, l2, u2)) => -// val s = bitVec_gcd(s1, s2) -// val l = if (smt_bvule(l1, l2) == TrueLiteral) l1 else lowestPossibleValue -// val u = if (smt_bvuge(u1, u2) == TrueLiteral) u1 else smt_bvsmod(smt_bvadd(highestPossibleValue, smt_bvsub(l1, BitVecLiteral(1, 64))), s) -// SI(s, l, u) -// } -// } -// -// /** -// * Calculating strided interval for a list of values using accumulative gcd. 
-// * -// * @param x the list of values -// * @return the strided interval representing the values in the list -// */ -// def valuesToSI(x: List[BitVecLiteral]): StridedInterval[BitVecLiteral] = { -// if (x.isEmpty) { -// SIBottom -// } else { -// val l = bitVec_min(x) -// val u = bitVec_max(x) -// val initialStride = smt_bvsub(u, l) -// val stride = x.foldLeft(initialStride) { -// case (acc, v) => bitVec_gcd(smt_bvsub(v, l), acc) -// } -// SI(stride, l, u) -// } -// } -} - -trait ValueSet[+T] - -case class VS[T](m: Map[T, StridedWrappedInterval]) extends ValueSet[T] { - override def toString: String = m.toString -} - -case object VSBottom extends ValueSet[Nothing] { - override def toString = "VSBot" -} - -case object VSTop extends ValueSet[Nothing] { - override def toString = "VSTop" -} - -/** The lattice of integers with the standard ordering. - */ -class ValueSetLattice[T] extends Lattice[ValueSet[T]] { - - override val bottom: ValueSet[T] = VSBottom - override def top: ValueSet[T] = VSTop - - val lattice: SASILattice = SASILattice() - - override def lub(x: ValueSet[T], y: ValueSet[T]): ValueSet[T] = { - (x, y) match { - case (VSBottom, t) => t - case (t, VSBottom) => t - case (VSTop, _) => VSTop - case (_, VSTop) => VSTop - case (VS(m1), VS(m2)) => - VS(m1.keys.foldLeft(m2) { - case (acc, k) => - val v1 = m1(k) - val v2 = m2(k) - acc + (k -> lattice.lub(v1, v2)) - }) - } - } - -// def meet(x: ValueSet[String], y: ValueSet[String]): ValueSet[String] = { -// (x, y) match { -// case (VSBottom, t) => VSBottom -// case (t, VSBottom) => VSBottom -// case (VSTop, _) => y -// case (_, VSTop) => x -// case (VS(m1), VS(m2)) => -// VS(m1.keys.foldLeft(m2) { -// case (acc, k) => -// val v1 = m1(k) -// val v2 = m2(k) -// acc + (k -> lattice.meet(v1, v2)) -// }) -// } -// } - - def add(x: ValueSet[T], y: ValueSet[T]): ValueSet[T] = { - (x, y) match { - case (VSBottom, t) => t - case (t, VSBottom) => t - case (VSTop, _) => VSTop - case (_, VSTop) => VSTop - case (VS(m1), VS(m2)) => - VS(m1.keys.foldLeft(m2) { - case (acc, k) => - val v1 = m1(k) - val v2 = m2(k) - acc + (k -> lattice.add(v1, v2)) - }) - } - } - - def add(x: ValueSet[T], y: BitVecLiteral): ValueSet[T] = { - x match { - case VSBottom => VSBottom - case VSTop => VSTop - case VS(m) => - VS(m.map { - case (k, s) => k -> lattice.add(s, y.value, y.size) // TODO: is the size correct here? - }) - } - } - - def widen(vs1: ValueSet[T], vs2: ValueSet[T]): ValueSet[T] = { - (vs1, vs2) match { - case (VSBottom, t) => ??? - case (t, VSBottom) => ??? - case (VSTop, _) => VSTop - case (_, VSTop) => VSTop - case (VS(m1), VS(m2)) => - VS(m1.keys.foldLeft(m2) { - case (acc, k) => - val v1 = m1(k) - val v2 = m2(k) - acc + (k -> lattice.widen(v1, v2)) - }) - } - } - - def removeLowerBounds(vs: ValueSet[T]): ValueSet[T] = { - vs match { - case VSBottom => VSBottom - case VSTop => VSTop - case VS(m) => - VS(m.map { - case (k, SI(s, l, u, w)) => k -> SI(s, lattice.lowestPossibleValue, u, w) - }) - } - } - - def removeUpperBound(vs: ValueSet[T]): ValueSet[T] = { - vs match { - case VSBottom => VSBottom - case VSTop => VSTop - case VS(m) => - VS(m.map { - case (k, SI(s, l, u, w)) => k -> SI(s, l, lattice.highestPossibleValue, w) - }) - } - } - - /** - * ∗(vs, s): Returns a pair of sets (F, P). F represents the set of “fully accessed” a-locs: it - * consists of the a-locs that are of size s and whose starting addresses are in vs. 
P represents - * the set of “partially accessed” a-locs: it consists of (i) a-locs whose starting addresses are in - * vs but are not of size s, and (ii) a-locs whose addresses are in vs but whose starting addresses - * and sizes do not meet the conditions to be in F. [Reference VSA paper] - * - * @param vsR2 - * @param s size of the dereference - * @return - */ - def dereference(s: BigInt, vs: ValueSet[String], mmm: MemoryModelMap): (Set[MemoryRegion], Set[MemoryRegion]) = { - vs match { - case VSBottom => VSBottom - case VSTop => ??? //TODO: should this return everything? - case VS(m) => - for (elem <- m) { - if (elem._2 != lattice.bottom) { // region SI defined - elem._2 match { - case SI(stride, lower, upper) => - val gamma: Set[BitVecLiteral] = lattice.gamma(SI(stride, lower, upper)) - // TODO: Global memory size can be retrieved from the symbol table and are of size s - // Map addresses to exact memory locations - val fullyAccessedLocations = gamma.toList.flatMap(address => mmm.findStackFullAccessesOnly(address.value, s)) - - // Identify partially accessed locations (if any) - val partiallyAccessedLocations = gamma.toList.flatMap(address => mmm.findStackPartialAccessesOnly(address.value, s)) - - // Return the set of fully accessed locations and the set of partially accessed locations - return (fullyAccessedLocations.diff(partiallyAccessedLocations).asInstanceOf[Set[MemoryRegion]], partiallyAccessedLocations.asInstanceOf[Set[MemoryRegion]]) - case _ => ??? - } - } - } - } - (Set.empty, Set.empty) - } -} - - - -/** The powerset lattice of a set of elements of type `A` with subset ordering. - */ -class PowersetLattice[A] extends Lattice[Set[A]] { - val bottom: Set[A] = Set.empty - def lub(x: Set[A], y: Set[A]): Set[A] = x.union(y) -} - -// Single element lattice (using Option) -class SingleElementLattice[T] extends Lattice[Option[T]] { - val bottom: Option[T] = None - def lub(x: Option[T], y: Option[T]): Option[T] = (x, y) match { - case (None, None) => None - case _ => Some(x.getOrElse(y.get)) - } -} - -trait LiftedElement[+T] -case class Lift[T](el: T) extends LiftedElement[T] { - override def toString = s"Lift($el)" -} -case object LiftedBottom extends LiftedElement[Nothing] { - override def toString = "LiftBot" -} -/** - * The lift lattice for `sublattice`. - * Supports implicit lifting and unlifting. - */ -class LiftLattice[T, +L <: Lattice[T]](val sublattice: L) extends Lattice[LiftedElement[T]] { - - val bottom: LiftedElement[T] = LiftedBottom - - def lub(x: LiftedElement[T], y: LiftedElement[T]): LiftedElement[T] = - (x, y) match { - case (LiftedBottom, t) => t - case (t, LiftedBottom) => t - case (Lift(a), Lift(b)) => Lift(sublattice.lub(a, b)) - } - - /** - * Lift elements of the sublattice to this lattice. - * Note that this method is declared as implicit, so the conversion can be done automatically. - */ - def lift(x: T): LiftedElement[T] = Lift(x) - - /** - * Un-lift elements of this lattice to the sublattice. - * Throws an IllegalArgumentException if trying to unlift the bottom element - * Note that this method is declared as implicit, so the conversion can be done automatically. 
- */ - def unlift(x: LiftedElement[T]): T = x match { - case Lift(s) => s - case LiftedBottom => throw new IllegalArgumentException("Cannot unlift bottom") - } -} - -trait TwoElement - -case object TwoElementTop extends TwoElement -case object TwoElementBottom extends TwoElement - - -/** - * A lattice with only top and bottom - */ -class TwoElementLattice extends Lattice[TwoElement]: - override val bottom: TwoElement = TwoElementBottom - override val top: TwoElement = TwoElementTop - - def lub(x: TwoElement, y: TwoElement): TwoElement = (x, y) match { - case (TwoElementBottom, TwoElementBottom) => TwoElementBottom - case _ => TwoElementTop - } - -trait FlatElement[+T] -case class FlatEl[T](el: T) extends FlatElement[T] -case object Top extends FlatElement[Nothing] -case object Bottom extends FlatElement[Nothing] - -/** The flat lattice made of element of `X`. Top is greater than every other element, and Bottom is less than every - * other element. No additional ordering is defined. - */ -class FlatLattice[X] extends Lattice[FlatElement[X]] { - - val bottom: FlatElement[X] = Bottom - - override val top: FlatElement[X] = Top - - def lub(x: FlatElement[X], y: FlatElement[X]): FlatElement[X] = (x, y) match { - case (a, Bottom) => a - case (Bottom, b) => b - case (a, b) if a == b => a - case (Top, _) => Top - case (_, Top) => Top - case _ => Top - } -} - -class TupleLattice[L1 <: Lattice[T1], L2 <: Lattice[T2], T1, T2](val lattice1: L1, val lattice2: L2) extends Lattice[(T1, T2)] { - override val bottom: (T1, T2) = (lattice1.bottom, lattice2.bottom) - - override def lub(x: (T1, T2), y: (T1, T2)): (T1, T2) = { - val (x1, x2) = x - val (y1, y2) = y - (lattice1.lub(x1, y1), lattice2.lub(x2, y2)) - } - - override def leq(x: (T1, T2), y: (T1, T2)): Boolean = { - val (x1, x2) = x - val (y1, y2) = y - lattice1.leq(x1, y1) && lattice2.leq(x2, y2) - } - - override def top: (T1, T2) = (lattice1.top, lattice2.top) -} - -//trait StridedIntervalLattice[T] extends Lattice[(T, T, T)] { -// override val bottom: (T, T, T) = (???, ???, ???) -// -// override def lub(x: (T1, T2), y: (T1, T2)): (T1, T2) = { -// val (x1, x2) = x -// val (y1, y2) = y -// (lattice1.lub(x1, y1), lattice2.lub(x2, y2)) -// } -// -// override def leq(x: (T1, T2), y: (T1, T2)): Boolean = { -// val (x1, x2) = x -// val (y1, y2) = y -// lattice1.leq(x1, y1) && lattice2.leq(x2, y2) -// } -// -// override def top: (T1, T2) = (lattice1.top, lattice2.top) -//} - -/** A lattice of maps from a set of elements of type `A` to a lattice with element `L'. Bottom is the default value. - */ -class MapLattice[A, T, +L <: Lattice[T]](val sublattice: L) extends Lattice[Map[A, T]] { - val bottom: Map[A, T] = Map().withDefaultValue(sublattice.bottom) - def lub(x: Map[A, T], y: Map[A, T]): Map[A, T] = - x.keys.foldLeft(y)((m, a) => m + (a -> sublattice.lub(x(a), y(a)))).withDefaultValue(sublattice.bottom) -} - -/** Constant propagation lattice. 
- * - */ -class ConstantPropagationLattice extends FlatLattice[BitVecLiteral] { - private def apply(op: (BitVecLiteral, BitVecLiteral) => BitVecLiteral, a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = try { - (a, b) match - case (FlatEl(x), FlatEl(y)) => FlatEl(op(x, y)) - case (Bottom, _) => Bottom - case (_, Bottom) => Bottom - case (_, Top) => Top - case (Top, _) => Top - } catch { - case e: Exception => - Logger.error(s"Failed on op $op with $a and $b") - throw e - } - - private def apply(op: BitVecLiteral => BitVecLiteral, a: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = a match - case FlatEl(x) => FlatEl(op(x)) - case Top => Top - case Bottom => Bottom - - def bv(a: BitVecLiteral): FlatElement[BitVecLiteral] = FlatEl(a) - def bvadd(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvadd, a, b) - def bvsub(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvsub, a, b) - def bvmul(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvmul, a, b) - def bvudiv(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvudiv, a, b) - def bvsdiv(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvsdiv, a, b) - def bvsrem(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvsrem, a, b) - def bvurem(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvurem, a, b) - def bvsmod(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvsmod, a, b) - def bvand(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvand, a, b) - def bvor(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvor, a, b) - def bvxor(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvxor, a, b) - def bvnand(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvnand, a, b) - def bvnor(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvnor, a, b) - def bvxnor(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvxnor, a, b) - def bvnot(a: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvnot, a) - def bvneg(a: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvneg, a) - def bvshl(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvshl, a, b) - def bvlshr(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvlshr, a, b) - def bvashr(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_bvashr, a, b) - def bvcomp(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = 
apply(BitVectorEval.smt_bvcomp, a, b) - def zero_extend(width: Int, a: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_zero_extend(width, _: BitVecLiteral), a) - def sign_extend(width: Int, a: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_sign_extend(width, _: BitVecLiteral), a) - def extract(high: Int, low: Int, a: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = - apply(BitVectorEval.boogie_extract(high, low, _: BitVecLiteral), a) - def concat(a: FlatElement[BitVecLiteral], b: FlatElement[BitVecLiteral]): FlatElement[BitVecLiteral] = apply(BitVectorEval.smt_concat, a, b) -} - -/** Constant propagation lattice. - * - */ -class ConstantPropagationLatticeWithSSA extends PowersetLattice[BitVecLiteral] { - private def apply(op: (BitVecLiteral, BitVecLiteral) => BitVecLiteral, a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = - val res = for { - x <- a - y <- b - } yield op(x, y) - res - - private def apply(op: BitVecLiteral => BitVecLiteral, a: Set[BitVecLiteral]): Set[BitVecLiteral] = - val res = for { - x <- a - } yield op(x) - res - - def bv(a: BitVecLiteral): Set[BitVecLiteral] = Set(a) - def bvadd(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvadd, a, b) - def bvsub(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvsub, a, b) - def bvmul(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvmul, a, b) - def bvudiv(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvudiv, a, b) - def bvsdiv(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvsdiv, a, b) - def bvsrem(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvsrem, a, b) - def bvurem(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvurem, a, b) - def bvsmod(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvsmod, a, b) - def bvand(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvand, a, b) - def bvor(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvor, a, b) - def bvxor(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvxor, a, b) - def bvnand(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvnand, a, b) - def bvnor(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvnor, a, b) - def bvxnor(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvxnor, a, b) - def bvnot(a: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvnot, a) - def bvneg(a: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvneg, a) - def bvshl(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvshl, a, b) - def bvlshr(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvlshr, a, b) - def bvashr(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvashr, a, b) - def bvcomp(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_bvcomp, a, b) - def zero_extend(width: Int, a: 
Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_zero_extend(width, _: BitVecLiteral), a) - def sign_extend(width: Int, a: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_sign_extend(width, _: BitVecLiteral), a) - - def extract(high: Int, low: Int, a: Set[BitVecLiteral]): Set[BitVecLiteral] = - apply(BitVectorEval.boogie_extract(high, low, _: BitVecLiteral), a) - - def concat(a: Set[BitVecLiteral], b: Set[BitVecLiteral]): Set[BitVecLiteral] = apply(BitVectorEval.smt_concat, a, b) -} \ No newline at end of file diff --git a/src/main/scala/analysis/GlobalRegionAnalysis.scala b/src/main/scala/analysis/GlobalRegionAnalysis.scala index ab2a46d8a..7350d11ce 100644 --- a/src/main/scala/analysis/GlobalRegionAnalysis.scala +++ b/src/main/scala/analysis/GlobalRegionAnalysis.scala @@ -85,13 +85,6 @@ trait GlobalRegionAnalysis(val program: Program, val firstArg = tryCoerceIntoData(arg1, n, subAccess) var regions = Set.empty[DataRegion] for (i <- firstArg) { -// if (globalOffsets.contains(i.start) && globalOffsets.contains(globalOffsets(i.start))) { // get the first base address -// val newExpr = BinaryExpr(op, BitVecLiteral(globalOffsets(i.start), evalArg2.get.size), evalArg2.get) -// regions = regions ++ tryCoerceIntoData(newExpr, n, subAccess) -// } else { -// val newExpr = BinaryExpr(op, BitVecLiteral(i.start, evalArg2.get.size), evalArg2.get) -// regions = regions ++ tryCoerceIntoData(newExpr, n, subAccess) -// } val newExpr = BinaryExpr(op, BitVecLiteral(resolveGlobalOffsetSecondLast(i.start), evalArg2.get.size), evalArg2.get) regions = regions ++ tryCoerceIntoData(newExpr, n, subAccess) } diff --git a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala index 53b0e7b8d..b3d13d0dc 100644 --- a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala +++ b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala @@ -41,7 +41,8 @@ class InterprocSteensgaardAnalysis( constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], mmm: MemoryModelMap, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], - globalOffsets: Map[BigInt, BigInt]) extends Analysis[Any] { + globalOffsets: Map[BigInt, BigInt], + vsaResult: Option[Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]]) extends Analysis[Any] { val solver: UnionFindSolver[StTerm] = UnionFindSolver() @@ -77,217 +78,6 @@ class InterprocSteensgaardAnalysis( BitVecLiteral(tableAddress, 64) } - /** - * Used to reduce an expression that may be a sub-region of a memory region. - * Pointer reduction example: - * R2 = R31 + 20 - * Mem[R2 + 8] <- R1 - * - * Steps: - * 1) R2 = R31 + 20 <- ie. stack access (assume R31 = stackPointer) - * ↓ - * R2 = StackRegion("stack_1", 20) - * - * 2) Mem[R2 + 8] <- R1 <- ie. 
memStore - * ↓ - * (StackRegion("stack_1", 20) + 8) <- R1 - * ↓ - * MMM.get(20 + 8) <- R1 - * - * @param binExpr - * @param n - * @return Set[MemoryRegion]: a set of regions that the expression may be pointing to - */ - def reducibleToRegion(binExpr: BinaryExpr, n: Command): Set[MemoryRegion] = { - var reducedRegions = Set.empty[MemoryRegion] - binExpr.arg1 match { - case variable: Variable => - val b = evaluateExpression(binExpr, constantProp(n)) - if (b.isDefined) { - val region = mmm.findDataObject(b.get.value) - reducedRegions = reducedRegions ++ region - } - if (reducedRegions.nonEmpty) { - return reducedRegions - } - val ctx = getUse(variable, n, reachingDefs) - for (i <- ctx) { - if (i != n) { // handles loops (ie. R19 = R19 + 1) %00000662 in jumptable2 - val regions = i.rhs match { - case loadL: MemoryLoad => - val foundRegions = exprToRegion(loadL.index, i) - val toReturn = mutable.Set[MemoryRegion]().addAll(foundRegions) - for { - f <- foundRegions - } { - if (memoryRegionContents.contains(f)) { - memoryRegionContents(f).foreach { - case b: BitVecLiteral => - // val region = mmm.findDataObject(b.value) - // if (region.isDefined) { - // toReturn.addOne(region.get) - // } - case r: MemoryRegion => - toReturn.addOne(r) - toReturn.remove(f) - } - } - } - toReturn.toSet - case _: BitVecLiteral => - Set.empty[MemoryRegion] - case _ => - Logger.debug(s"Unknown expression: $i") - Logger.debug(ctx) - exprToRegion(i.rhs, i) - } - val result = evaluateExpression(binExpr.arg2, constantProp(n)) - if (result.isDefined) { - val b = result.get - for { - r <- regions - } { - r match { - case stackRegion: StackRegion => - val nextOffset = bitVectorOpToBigIntOp(binExpr.op, stackRegion.start, b.value) - reducedRegions ++= exprToRegion(BinaryExpr(binExpr.op, stackPointer, BitVecLiteral(nextOffset, 64)), i) - case dataRegion: DataRegion => - val nextOffset = BinaryExpr(binExpr.op, relocatedBase(dataRegion.start), b) - val b2 = evaluateExpression(nextOffset, constantProp(n)) - if (b2.isDefined) { - reducedRegions ++= exprToRegion(b2.get, i) - } - case _ => - } - } - } - } - } - case _ => - } - reducedRegions - } - - // TODO: You must redefine how shared regions are accessed by finding if the register we are evaluating is shared - - /** - * Finds a region for a given expression using MMM results - * - * @param expr - * @param n - * @return Set[MemoryRegion]: a set of regions that the expression may be pointing to - */ - def exprToRegion(expr: Expr, n: Command): Set[MemoryRegion] = { - var res = Set[MemoryRegion]() - mmm.popContext() - mmm.pushContext(IRWalk.procedure(n).name) - expr match { // TODO: Stack detection here should be done in a better way or just merged with data - case binOp: BinaryExpr if binOp.arg1 == stackPointer => - val b = evaluateExpression(binOp.arg2, constantProp(n)) - if (b.isDefined) { - if binOp.arg2.variables.exists { v => v.sharedVariable } then { - Logger.debug("Shared stack object: " + b) - Logger.debug("Shared in: " + expr) - val regions = mmm.findSharedStackObject(b.get.value) - Logger.debug("found: " + regions) - res ++= regions - } else { - val region = mmm.findStackObject(b.get.value) - if (region.isDefined) { - res = res + region.get - } - } - } - res - case binaryExpr: BinaryExpr => - res ++= reducibleToRegion(binaryExpr, n) - res - case v: Variable if v == stackPointer => - res ++= mmm.findStackObject(0) - res - case v: Variable => - val b = evaluateExpression(expr, constantProp(n)) - if (b.isDefined) { - Logger.debug("BitVecLiteral: " + b) - val region = 
mmm.findDataObject(b.get.value) - if (region.isDefined) { - res += region.get - } - } - if (res.isEmpty) { // may be passed as param - val ctx = getUse(v, n, reachingDefs) - for (i <- ctx) { - i.rhs match { - case load: MemoryLoad => // treat as a region - res ++= exprToRegion(load.index, i) - case binaryExpr: BinaryExpr => - res ++= reducibleToRegion(binaryExpr, i) - res ++= exprToRegion(i.rhs, i) - case _ => // also treat as a region (for now) even if just Base + Offset without memLoad - res ++= exprToRegion(i.rhs, i) - } - } - } - res - case _ => - val b = evaluateExpression(expr, constantProp(n)) - if (b.isDefined) { - Logger.debug("BitVecLiteral: " + b) - val region = mmm.findDataObject(b.get.value) - if (region.isDefined) { - res += region.get - } - } - res - } - } - - def memLoadToRegion(memLoad: MemoryLoad, cmd: Command): Set[MemoryRegion] = { - if (mmm.getStack(cmd).nonEmpty) { - mmm.getStack(cmd).asInstanceOf[Set[MemoryRegion]] - } else { - val isGlobal = evaluateExpression(memLoad.index, constantProp(cmd)) - if (isGlobal.isDefined) { - val globalRegion = mmm.findDataObject(isGlobal.get.value) - if (globalRegion.isDefined) { - return Set(globalRegion.get) - } - return Set.empty[MemoryRegion] // TODO: IT SHOULD THROW AN EXCEPTION - //throw Exception(s"Could not find region for MemLoad: $memLoad, Command: $cmd, Eval: $isGlobal, Global: $globalRegion") - } - memLoad.index match // treats case where the index is a region and is loaded again like in jumptable2/clang_pic - case variable: Variable => - val ctx = getUse(variable, cmd, reachingDefs) - for (i <- ctx) { - i.rhs match { - case load: MemoryLoad => - return memLoadToRegion(load, i) - case _ => - } - } - case _ => - - //throw Exception(s"Could not find region for MemLoad: $memLoad, Command: $cmd, Eval: $isGlobal") - Set.empty[MemoryRegion] - } - } - -// def checkValidBase(expr: Expr, cmd: Command): Option[MemoryRegion] = { -// val evaluation = evaluateExpression(expr, constantProp(cmd)) -// if (evaluation.isDefined) { -// val isGlobal = mmm.isDataBase(evaluation.get.value) -// if (isGlobal.isEmpty) { -// val isStack = mmm.isStackBase(Long.MaxValue - evaluation.get.value) -// if (isStack.isDefined) { -// return isStack -// } -// } else { -// return isGlobal -// } -// } -// None -// } - def nodeToRegion(n: CFGPosition): Set[MemoryRegion] = { var returnRegions = Set.empty[MemoryRegion] n match { @@ -299,8 +89,39 @@ class InterprocSteensgaardAnalysis( returnRegions } - def canCoerceIntoDataRegion(bitVecLiteral: BitVecLiteral): Option[DataRegion] = { - mmm.isDataBase(bitVecLiteral.value) + def canCoerceIntoDataRegion(bitVecLiteral: BitVecLiteral, size: Int): Option[DataRegion] = { + mmm.findDataObject(bitVecLiteral.value) + } + + def vsaApproximation(variable: Variable, n: CFGPosition): Set[MemoryRegion] = { + val ctx = getUse(variable, n, reachingDefs) + var collage = Set.empty[MemoryRegion] + for (i <- ctx) { + if (i != n) { + var tryVisit = Set.empty[MemoryRegion] + if (vsaResult.isDefined) { + vsaResult.get.get(i) match + case Some(value) => value match + case Lift(el) => el.get(i.lhs) match + case Some(value) => value.foreach { + case addressValue: AddressValue => + tryVisit = tryVisit + addressValue.region + case literalValue: LiteralValue => + } + case None => + case LiftedBottom => + case _ => + case None => + } +// if (tryVisit.isEmpty) { +// tryVisit = localTransfer(i, Set.empty) +// } + if (tryVisit.nonEmpty) { + collage = collage ++ tryVisit + } + } + } + collage } /** @inheritdoc @@ -340,7 +161,7 @@ class 
InterprocSteensgaardAnalysis( val alpha = FreshVariable() X2_star.foreach( x => - unify(AllocVariable(x), PointerRef(alpha)) + unify(PointerRef(alpha), ExpressionVariable(x)) ) unify(IdentifierVariable(RegisterWrapperEqualSets(X1, getDefinition(X1, cmd, reachingDefs))), alpha) } @@ -348,19 +169,21 @@ class InterprocSteensgaardAnalysis( // *X1 = X2: [[X1]] = ↑a ^ [[X2]] = a where a is a fresh term variable val X1_star = nodeToRegion(node) // TODO: This is risky as it tries to coerce every value to a region (needed for functionpointer example) - val X2 = exprToRegion(memoryAssign.value, cmd) - - val alpha = FreshVariable() - X1_star.foreach(x => - unify(AllocVariable(x), PointerRef(alpha)) - ) - X2.foreach(x => unify(AllocVariable(x), alpha)) - //val X2 = unwrapExprToVar(memoryAssign.value) -// if (X2.isDefined) { -// unify(IdentifierVariable(RegisterWrapperEqualSets(X2.get, getDefinition(X2.get, cmd, reachingDefs))), alpha) -// } else { -// throw Exception(s"Could not find variable for memoryAssign: $memoryAssign, Command: $cmd") -// } + val unwrapped = unwrapExprToVar(memoryAssign.value) + if (unwrapped.isDefined) { + val X2 = unwrapped.get + val X2_regions: Set[MemoryRegion] = vsaApproximation(X2, node) + + val alpha = FreshVariable() + val pointerRef = PointerRef(alpha) + X1_star.foreach(x => + unify(ExpressionVariable(x), pointerRef) + ) + X2_regions.foreach( + x => + unify(ExpressionVariable(x), alpha) + ) + } case _ => // do nothing TODO: Maybe LocalVar too? } case _ => @@ -375,24 +198,20 @@ class InterprocSteensgaardAnalysis( /** @inheritdoc */ - def pointsTo(): Map[RegisterWrapperEqualSets | MemoryRegion, Set[RegisterWrapperEqualSets | MemoryRegion]] = { + def pointsTo(): Map[RegisterWrapperEqualSets, Set[RegisterWrapperEqualSets | MemoryRegion]] = { val solution = solver.solution() val unifications = solver.unifications() Logger.debug(s"Solution: \n${solution.mkString(",\n")}\n") Logger.debug(s"Sets: \n${unifications.values.map { s => s"{ ${s.mkString(",")} }"}.mkString(", ")}") - val vars = solution.keys - val emptyMap = Map[RegisterWrapperEqualSets | MemoryRegion, Set[RegisterWrapperEqualSets | MemoryRegion]]() - val pointsto = vars.foldLeft(emptyMap) { (a, v: Var[StTerm]) => + val vars = solution.keys.collect { case id: IdentifierVariable => id } + val emptyMap = Map[RegisterWrapperEqualSets, Set[RegisterWrapperEqualSets | MemoryRegion]]() + val pointsto = vars.foldLeft(emptyMap) { (a, v: IdentifierVariable) => val pt: Set[RegisterWrapperEqualSets | MemoryRegion] = unifications(solution(v)).collect { case PointerRef(IdentifierVariable(id)) => id case PointerRef(AllocVariable(alloc)) => alloc - case AllocVariable(alloc) => alloc }.toSet - v match - case AllocVariable(alloc) => a + (alloc -> pt) - case IdentifierVariable(id) => a + (id -> pt) - case _ => a + a + (v.id -> pt) } Logger.debug(s"\nPoints-to:\n${pointsto.map(p => s"${p._1} -> { ${p._2.mkString(",")} }").mkString("\n")}\n") pointsto @@ -427,6 +246,13 @@ case class IdentifierVariable(id: RegisterWrapperEqualSets) extends StTerm with override def toString: String = s"$id" } +/** A term variable that represents an expression in the program. + */ +case class ExpressionVariable(expr: MemoryRegion | Expr) extends StTerm with Var[StTerm] { + + override def toString: String = s"$expr" +} + /** A fresh term variable. 
*/ case class FreshVariable(var id: Int = 0) extends StTerm with Var[StTerm] { diff --git a/src/main/scala/analysis/IrreducibleLoops.scala b/src/main/scala/analysis/IrreducibleLoops.scala index 7486cbf44..f3d3bb44e 100644 --- a/src/main/scala/analysis/IrreducibleLoops.scala +++ b/src/main/scala/analysis/IrreducibleLoops.scala @@ -24,7 +24,7 @@ private def label(p: CFGPosition) = { * */ case class LoopEdge(from: CFGPosition, to: CFGPosition) { - override def toString: String = s"(${from}, ${to})" + override def toString: String = s"(${label(from)}, ${label(to)})" } /* A loop is a subgraph of a CFG diff --git a/src/main/scala/analysis/LAST_VSA_BACKUP.scala b/src/main/scala/analysis/LAST_VSA_BACKUP.scala deleted file mode 100644 index 92bff84b7..000000000 --- a/src/main/scala/analysis/LAST_VSA_BACKUP.scala +++ /dev/null @@ -1,276 +0,0 @@ -//package analysis -//import ir.* -//import util.* -// -//import scala.collection.mutable -//import analysis.BitVectorEval.* -//import analysis.* -// -//class ActualVSA(program: Program, -// constantPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], -// reachingDefs: Map[CFGPosition, (Map[Variable, Set[LocalAssign]], Map[Variable, Set[LocalAssign]])], -// mmm: MemoryModelMap) { -// -// enum Flag { -// case CF // Carry Flag -// case ZF // Zero Flag -// case SF // Sign Flag -// case PF // Parity Flag -// case AF // Auxiliary Flag -// case OF // Overflow Flag -// } -// -// enum Bool3 { -// case True -// case False -// case Maybe -// } -// -// // TODO: This assumes no function is called Data or Heap (should be a tuple instead) -// val DATA_REGION_NAME = "Data" -// val HEAP_REGION_NAME = "Heap" -// -// val lattice: ValueSetLattice = ValueSetLattice() -// -// type MemRgn = String // all record titles -// -// val MEMORY_REGIONS: List[MemRgn] = (Set(DATA_REGION_NAME, HEAP_REGION_NAME) ++ mmm.getAllocsPerProcedure.keySet).toList.sorted -// val ALLOCS: Map[String, Set[MemoryRegion]] = mmm.getAllocsPerProcedure.asInstanceOf[Map[String, Set[MemoryRegion]]] ++ Map("Data" -> mmm.getAllDataRegions.asInstanceOf[Set[MemoryRegion]], "Heap" -> mmm.getAllHeapRegions.asInstanceOf[Set[MemoryRegion]]) -// val AllocEnv: AlocEnv = AlocEnv() -// -// // /** -// // * ∗(vs, s): Returns a pair of sets (F, P). F represents the set of “fully accessed” a-locs: it -// // * consists of the a-locs that are of size s and whose starting addresses are in vs. P represents -// // * the set of “partially accessed” a-locs: it consists of (i) a-locs whose starting addresses are in -// // * vs but are not of size s, and (ii) a-locs whose addresses are in vs but whose starting addresses -// // * and sizes do not meet the conditions to be in F. 
[Reference VSA paper] -// // * -// // * @param vsR2 -// // * @param s size of the dereference -// // * @return -// // */ -// // def dereference(s: BigInt): (Set[MemoryRegion], Set[MemoryRegion]) = { -// // // TODO: Global memory size can be retrieved from the symbol table and are of size s -// // // Map addresses to exact memory locations -// // val fullyAccessedLocations = stridedInterval.gamma.flatMap(address => mmm.findStackFullAccessesOnly(address.value, s)) -// // -// // // Identify partially accessed locations (if any) -// // val partiallyAccessedLocations = stridedInterval.gamma.flatMap(address => mmm.findStackPartialAccessesOnly(address.value, s)) -// // -// // // Return the set of fully accessed locations and the set of partially accessed locations -// // (fullyAccessedLocations.diff(partiallyAccessedLocations).asInstanceOf[Set[MemoryRegion]], partiallyAccessedLocations.asInstanceOf[Set[MemoryRegion]]) -// // } -// // } -// -// /** -// * Allocs Structure -// * Procedures -// * main -> {alloc1, alloc2, alloc3} -// * foo -> {alloc4, alloc5} -// * Data -// * Data -> {alloc6, alloc7} -// * Heap -// * Heap -> {alloc8, alloc9} -// */ -// case class AlocEnv() { -// private val envs: mutable.Map[MemRgn, StridedInterval[BitVecLiteral]] = preCalculate() -// private val valueSets: mutable.Map[MemRgn, ValueSet[String]] = mutable.Map[MemRgn, ValueSet[String]]() -// -// def preCalculate(): mutable.Map[MemRgn, StridedInterval[BitVecLiteral]] = { -// val res = mutable.Map[MemRgn, StridedInterval[BitVecLiteral]]() -// MEMORY_REGIONS.foreach(r => { -// res.put(r, getSrtidedIntervals(r)) -// }) -// res -// } -// -// private def getSrtidedIntervals(r: MemRgn): StridedInterval[BitVecLiteral] = { -// // if stack or data we have offset. Otherwise we mark it as bottom VS -// if (r == DATA_REGION_NAME) { -// val allocsThatBelong = ALLOCS(r).asInstanceOf[Set[DataRegion]] -// lattice.lattice.valuesToSI(allocsThatBelong.map(a => a.start).toList) -// } else if (r == HEAP_REGION_NAME) { -// lattice.lattice.bottom -// } else { -// val allocsThatBelong = ALLOCS(r).asInstanceOf[Set[StackRegion]] -// lattice.lattice.valuesToSI(allocsThatBelong.map(a => a.start).toList) -// } -// } -// -// def getVS(r: MemRgn): ValueSet[String] = { -// if (valueSets.contains(r)) { -// valueSets(r) -// } else { -// // map everything that is not r to bottom -// val cpy = envs.clone() -// cpy.keys.foreach(k => if k != r then cpy(k) = lattice.lattice.bottom) -// valueSets.put(r, VS(cpy.toMap)) -// VS(cpy.toMap) -// } -// } -// } -// -// case class AbsEnv(): -// var regEnv: mutable.Map[Variable, VS[String]] = mutable.Map[Variable, VS[String]]().withDefault(_ => lattice.bottom) -// var flagEnv: mutable.Map[Flag, Bool3] = mutable.Map[Flag, Bool3]().withDefault(_ => Bool3.Maybe) -// var alocEnv: AlocEnv = AlocEnv() -// -// def join(absEnv: AbsEnv): AbsEnv = { -// val out = AbsEnv() -// out.regEnv = regEnv.clone() -// out.flagEnv = flagEnv.clone() -// out.alocEnv = alocEnv -// absEnv.regEnv.foreach { case (k, v) => -// out.regEnv(k) = lattice.lub(regEnv(k), v) -// } -// absEnv.flagEnv.foreach { case (k, v) => -// out.flagEnv(k) = ??? 
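// Editor's aside (illustrative sketch, not part of this patch): AbsEnv.join in the
// deleted backup above clones the first environment and folds the second into it,
// joining register value-sets pointwise with the lattice's lub (the flag join is still
// left as ???). The same pointwise join, written against a generic lattice so it is
// self-contained:
object PointwiseJoinSketch {
  trait Lat[T] { def bottom: T; def lub(x: T, y: T): T }

  def joinEnv[K, T](x: Map[K, T], y: Map[K, T], l: Lat[T]): Map[K, T] =
    (x.keySet ++ y.keySet).map { k =>
      k -> l.lub(x.getOrElse(k, l.bottom), y.getOrElse(k, l.bottom))
    }.toMap

  def main(args: Array[String]): Unit = {
    val setLat = new Lat[Set[Int]] {
      val bottom: Set[Int] = Set.empty
      def lub(x: Set[Int], y: Set[Int]): Set[Int] = x ++ y
    }
    // R0 is joined element-wise; R1 only appears on one side and defaults to bottom.
    println(joinEnv(Map("R0" -> Set(1)), Map("R0" -> Set(2), "R1" -> Set(3)), setLat))
  }
}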
-// } -// out -// } -// -// override def toString: String = { -// val env1Str = regEnv.map { case (k, v) => s"$k -> $v" }.mkString("\n\n") -// val env2Str = flagEnv.map { case (k, v) => s"$k -> $v" }.mkString("\n\n") -// val env3Str = alocEnv.toString -// s"Env1:\n\n$env1Str\n\nEnv2:\n\n$env2Str\n\nEnv3:\n\n$env3Str" -// } -// -// def AbstractTransformer(in: AbsEnv, instruction: CFGPosition): AbsEnv = { -// instruction match { -// case p: Procedure => in -// case b: Block => in -// case c: Command => -// c match -// case statement: Statement => -// statement match -// case localAssign: LocalAssign => -// localAssign.rhs match -// case binOp: BinaryExpr => -// if (binOp.arg1.isInstanceOf[Variable]) { -// val R1 = localAssign.lhs -// val R2 = binOp.arg1.asInstanceOf[Variable] -// val c = evaluateExpression(binOp.arg2, constantPropResult(instruction)) -// if (c.isDefined) { -// -// // R1 = R2 + c -// val out = in -// val vs_R2: ValueSet[String] = in.regEnv.get(R2) -// out.regEnv(R1) = lattice.add(vs_R2, c.get) -// return out -// } -// } -// in -// case memoryLoad: MemoryLoad => -// memoryLoad.index match -// case binOp: BinaryExpr => -// if (binOp.arg2.isInstanceOf[Variable]) { -// val R1 = localAssign.lhs -// val R2 = binOp.arg1.asInstanceOf[Variable] // TODO: Is R2 always a variable? -// val out = in -// getDefinition(binOp.arg2.asInstanceOf[Variable], instruction, reachingDefs).foreach { -// d => -// d.rhs match -// case binOp2: BinaryExpr => -// val c1 = evaluateExpression(binOp2.arg1, constantPropResult(instruction)) -// val c2 = evaluateExpression(binOp2.arg2, constantPropResult(instruction)) -// // R1 = *(R2 + c1) + c2 -// val vs_R2: ValueSet[String] = in.regEnv(R2) -// val s = memoryLoad.size // s is the size of dereference performed by the instruction -// val (f: Set[MemoryRegion], p: Set[MemoryRegion]) = lattice.dereference(BigInt(s), vs_R2, mmm) -// println("VSA") -// println(f) -// if (p.isEmpty) { -// val vs_rhs = f.map(r => in.regEnv(r).getAAlloc(r).valueSet).fold(lattice.bottom)(_ join _) -// out.env1(R1) = lattice.add(vs_rhs, c2.get) -// } else { -// out.env1(R1) = lattice.top -// } -// case _ => -// } -// out -// } else { -// in -// } -// case _ => in // TODO: Handle other cases -// case variable: Variable => -// ??? -// // val R1 = localAssign.lhs -// // val R2 = variable -// // // R1 >= R2 -// // val out = in -// // val vs_R1 = in.env1.getOrElseUpdate(R1, ValueSetLattice.BOTTOM) -// // val vs_R2 = in.env1(R2) -// // val vs_lb = vs_R2.removeUpperBounds() -// // val vs_ub = vs_R1.removeLowerBounds() -// // out.env1(R1) = vs_R1.meet(vs_lb) -// // out.env1(R2) = vs_R2.meet(vs_ub) -// // out -// case bitVecLiteral: BitVecLiteral => -// ??? -// // val R1 = localAssign.lhs -// // val c = bitVecLiteral -// // // R1 <= c -// // // from 0 to c, all value sets are possible (ie. stack, global) TODO: this may be wrong because of the _ join _? 
-// // val interval = bitVec_interval(BitVecLiteral(0, c.size), c, BitVecLiteral(1, c.size)) -// // val regions: mutable.Set[MemoryRegion] = mutable.Set() -// // println(c) -// // interval.foreach(v => -// // val dataObject = mmm.findDataObject(v.value) -// // if dataObject.isDefined then regions.add(dataObject.get) -// // ) -// // TOP_STRIDE.gamma.map(v => regions.add(mmm.findStackObject(v.value).get)) -// // -// // val allValueSets: mutable.Set[ValueSet] = mutable.Set() -// // regions.foreach(r => allValueSets.add(in.env2(r).getAAlloc(r).valueSet)) -// // val vs_c = allValueSets.fold(ValueSetLattice.BOTTOM)(_ join _) -// // val out = in -// // out.env1(R1) = in.env1(R1).meet(vs_c) -// // out -// -// // val vs_c = ValueSet(Set(StridedInterval(smt_gcd(BitVecLiteral(BigInt(0), c.size), c), BitVecLiteral(BigInt(0), c.size), c))) // TODO: Fix ME -// // val out = in -// // out.env1(R1) = in.env1(R1).meet(vs_c) -// // out -// case _ => in // TODO: Handle other cases -// case memoryAssign: MemoryAssign => in // TODO: *(R1 + c1) = R2 + c2 -// case nop: NOP => in -// case assert: Assert => in -// case assume: Assume => in -// case jump: Jump => in -// } -// } -// -// def IntraProceduralVSA(): mutable.Map[CFGPosition, AbsEnv] = { -// val worklist = new mutable.Queue[CFGPosition]() -// worklist.enqueue(program.mainProcedure) -// -// val absEnv_enter = AbsEnv() -// val abstractStates = mutable.Map[CFGPosition, AbsEnv](worklist.head -> absEnv_enter) -// while(worklist.nonEmpty) { -// val n: CFGPosition = worklist.dequeue() -// val m = IntraProcIRCursor.succ(n) -// for (succ <- m) { -// mmm.popContext() -// mmm.pushContext(IRWalk.procedure(n).name) -// val edge_amc = AbstractTransformer(abstractStates(n), succ) -// Propagate(succ, edge_amc) -// } -// } -// -// def Propagate(n: CFGPosition, edge_amc: AbsEnv): Unit = { -// if (!abstractStates.contains(n)) { -// abstractStates(n) = edge_amc -// worklist.enqueue(n) -// } else { -// val oldEnv = abstractStates(n) -// val newEnv = oldEnv.join(edge_amc) -// if (newEnv != oldEnv) { -// abstractStates(n) = newEnv -// worklist.enqueue(n) -// } -// } -// } -// abstractStates -// } -//} diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index 362d3b9cc..6780c386b 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -221,7 +221,13 @@ trait MemoryRegionAnalysis(val program: Program, val newHeapRegion = HeapRegion(name, start, negB, IRWalk.procedure(n)) addReturnHeap(directCall, newHeapRegion) s - case None => s + case None => + // Assume heap region size is at least 1 TODO: must approximate size of heap + val negB = 1 + val (name, start) = nextMallocCount(negB) + val newHeapRegion = HeapRegion(name, start, negB, IRWalk.procedure(n)) + addReturnHeap(directCall, newHeapRegion) + s } } else { s diff --git a/src/main/scala/analysis/UtilMethods.scala b/src/main/scala/analysis/UtilMethods.scala index ae30810db..43ebc6f3a 100644 --- a/src/main/scala/analysis/UtilMethods.scala +++ b/src/main/scala/analysis/UtilMethods.scala @@ -179,7 +179,7 @@ def unwrapExpr(expr: Expr): Option[MemoryLoad] = { case e: ZeroExtend => unwrapExpr(e.body) case repeat: Repeat => unwrapExpr(repeat.body) case unaryExpr: UnaryExpr => unwrapExpr(unaryExpr.arg) - case binaryExpr: BinaryExpr => + case binaryExpr: BinaryExpr => // TODO: incorrect unwrapExpr(binaryExpr.arg1) unwrapExpr(binaryExpr.arg2) case memoryLoad: MemoryLoad => @@ -198,7 +198,7 @@ def 
unwrapExprToVar(expr: Expr): Option[Variable] = { case e: ZeroExtend => unwrapExprToVar(e.body) case repeat: Repeat => unwrapExprToVar(repeat.body) case unaryExpr: UnaryExpr => unwrapExprToVar(unaryExpr.arg) - case binaryExpr: BinaryExpr => + case binaryExpr: BinaryExpr => // TODO: incorrect unwrapExprToVar(binaryExpr.arg1) unwrapExprToVar(binaryExpr.arg2) case memoryLoad: MemoryLoad => unwrapExprToVar(memoryLoad.index) diff --git a/src/main/scala/ir/Expr.scala b/src/main/scala/ir/Expr.scala index 6a7c862cd..f9ad4faf8 100644 --- a/src/main/scala/ir/Expr.scala +++ b/src/main/scala/ir/Expr.scala @@ -336,7 +336,6 @@ sealed trait Global sealed trait Variable extends Expr { val name: String val irType: IRType - var sharedVariable: Boolean = false override def getType: IRType = irType override def variables: Set[Variable] = Set(this) @@ -365,7 +364,7 @@ case class Register(override val name: String, size: Int) extends Variable with case class LocalVar(override val name: String, override val irType: IRType) extends Variable { override def toGamma: BVar = BVariable(s"Gamma_$name", BoolBType, Scope.Local) override def toBoogie: BVar = BVariable(s"$name", irType.toBoogie, Scope.Local) - override def toString: String = s"LocalVar(${name}_$sharedVariable, $irType)" + override def toString: String = s"LocalVar($name, $irType)" override def acceptVisit(visitor: Visitor): Variable = visitor.visitLocalVar(this) } diff --git a/src/main/scala/ir/transforms/SplitThreads.scala b/src/main/scala/ir/transforms/SplitThreads.scala index 1496c5e33..ef9e0b853 100644 --- a/src/main/scala/ir/transforms/SplitThreads.scala +++ b/src/main/scala/ir/transforms/SplitThreads.scala @@ -20,7 +20,7 @@ import cilvisitor._ // do reachability analysis // also need a bit in the IR where it creates separate files def splitThreads(program: Program, - pointsTo: Map[RegisterWrapperEqualSets | MemoryRegion, Set[RegisterWrapperEqualSets | MemoryRegion]], + pointsTo: Map[RegisterWrapperEqualSets, Set[RegisterWrapperEqualSets | MemoryRegion]], regionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])] ): Unit = { diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 93e60bd12..7bfad8e3b 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -58,7 +58,7 @@ case class StaticAnalysisContext( vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]], interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]], paramResults: Map[Procedure, Set[Variable]], - steensgaardResults: Map[RegisterWrapperEqualSets | MemoryRegion, Set[RegisterWrapperEqualSets | MemoryRegion]], + steensgaardResults: Map[RegisterWrapperEqualSets, Set[RegisterWrapperEqualSets | MemoryRegion]], mmmResults: MemoryModelMap, memoryRegionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], @@ -405,6 +405,13 @@ object StaticAnalysis { mmm.convertMemoryRegions(mraSolver.procedureToStackRegions, mraSolver.procedureToHeapRegions, mraSolver.mergeRegions, mraResult, mraSolver.procedureToSharedRegions, graSolver.getDataMap, graResult) mmm.logRegions() + Logger.debug("[!] 
Running Steensgaard") + val steensgaardSolver = InterprocSteensgaardAnalysis(interDomain.toSet, constPropResult, mmm, reachingDefinitionsAnalysisResults, globalOffsets, previousVSAResults) + steensgaardSolver.analyze() + val steensgaardResults = steensgaardSolver.pointsTo() + val memoryRegionContents = steensgaardSolver.getMemoryRegionContents + mmm.logRegions(memoryRegionContents) + Logger.debug("[!] Running VSA") val vsaSolver = ValueSetAnalysisSolver(domain.toSet, IRProgram, mmm, constPropResult, reachingDefinitionsAnalysisResults) val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = vsaSolver.analyze() @@ -416,13 +423,6 @@ object StaticAnalysis { ) }) - Logger.debug("[!] Running Steensgaard") - val steensgaardSolver = InterprocSteensgaardAnalysis(interDomain.toSet, constPropResult, mmm, reachingDefinitionsAnalysisResults, globalOffsets) - steensgaardSolver.analyze() - val steensgaardResults = steensgaardSolver.pointsTo() - val memoryRegionContents = steensgaardSolver.getMemoryRegionContents - mmm.logRegions(memoryRegionContents) - Logger.debug("[!] Injecting regions") val regionInjector = RegionInjector(domain, IRProgram, constPropResult, mmm, reachingDefinitionsAnalysisResults, globalOffsets) regionInjector.nodeVisitor() @@ -557,7 +557,7 @@ object RunUtils { var iteration = 1 var modified: Boolean = true val analysisResult = mutable.ArrayBuffer[StaticAnalysisContext]() - while (modified) { + while (modified || analysisResult.size < 2) { Logger.debug("[!] Running Static Analysis") val result = StaticAnalysis.analyse(ctx, config, iteration, analysisResult.lastOption) analysisResult.append(result) From d0ca3ac21a41802714b85b3796ad75408de69fb7 Mon Sep 17 00:00:00 2001 From: l-kent Date: Mon, 28 Oct 2024 10:31:00 +1000 Subject: [PATCH 094/104] re-enable procedure summaries --- src/main/scala/analysis/SummaryGenerator.scala | 6 ++---- src/main/scala/util/RunUtils.scala | 6 ++---- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/src/main/scala/analysis/SummaryGenerator.scala b/src/main/scala/analysis/SummaryGenerator.scala index b264755dd..18a5b4205 100644 --- a/src/main/scala/analysis/SummaryGenerator.scala +++ b/src/main/scala/analysis/SummaryGenerator.scala @@ -1,4 +1,3 @@ -/* package analysis import analysis.* @@ -96,7 +95,7 @@ class SummaryGenerator( private def toGamma(variable: Taintable): Option[BExpr] = { variable match { - case variable: Register => Some(variable.toGamma()) + case variable: Register => Some(variable.toGamma) case variable: LocalVar => None case variable: GlobalVariable => Some(variable.toGamma) //case variable: LocalStackVariable => None @@ -185,5 +184,4 @@ class SummaryGenerator( } } } -} -*/ \ No newline at end of file +} \ No newline at end of file diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index b53af6d36..0dcfb70bc 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -239,7 +239,6 @@ object IRTransform { assert(invariant.singleCallBlockEnd(ctx.program)) } - /* def generateProcedureSummaries( ctx: IRContext, IRProgram: Program, @@ -269,7 +268,6 @@ object IRTransform { modified } - */ } @@ -583,12 +581,12 @@ object RunUtils { result.vsaResult, ctx.program ) - /* + Logger.debug("[!] Generating Procedure Summaries") if (config.summariseProcedures) { IRTransform.generateProcedureSummaries(ctx, ctx.program, result.constPropResult, result.varDepsSummaries) } - */ + if (modified) { iteration += 1 Logger.debug(s"[!] 
Analysing again (iter $iteration)") From 46b144ca505a858513770a933cd2a9193e387966 Mon Sep 17 00:00:00 2001 From: yousifpatti Date: Tue, 29 Oct 2024 12:01:43 +1000 Subject: [PATCH 095/104] Fixes to pointers-to-pointers --- .../scala/analysis/GlobalRegionAnalysis.scala | 32 +++++---- src/main/scala/analysis/MemoryModelMap.scala | 70 +++++++++---------- src/main/scala/util/RunUtils.scala | 9 ++- 3 files changed, 61 insertions(+), 50 deletions(-) diff --git a/src/main/scala/analysis/GlobalRegionAnalysis.scala b/src/main/scala/analysis/GlobalRegionAnalysis.scala index 7350d11ce..947defa5b 100644 --- a/src/main/scala/analysis/GlobalRegionAnalysis.scala +++ b/src/main/scala/analysis/GlobalRegionAnalysis.scala @@ -10,7 +10,6 @@ trait GlobalRegionAnalysis(val program: Program, val constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], val reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], val mmm: MemoryModelMap, - val globalOffsets: Map[BigInt, BigInt], val vsaResult: Option[Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]]) { var dataCount: Int = 0 @@ -49,12 +48,20 @@ trait GlobalRegionAnalysis(val program: Program, def getDataMap: mutable.HashMap[BigInt, DataRegion] = dataMap - def resolveGlobalOffsetSecondLast(address: BigInt): BigInt = { + /** + * For DataRegions, the actual address used needs to be converted to the relocated address. + * This is because when regions are found, the relocated address is used and as such match + * the correct range. + * + * @param address: The starting DataRegion + * @return DataRegion: The relocated data region if any + */ + def resolveGlobalOffsetSecondLast(address: DataRegion): DataRegion = { var tableAddress = address // addresses may be layered as in jumptable2 example for which recursive search is required var exitLoop = false - while (globalOffsets.contains(tableAddress) && globalOffsets.contains(globalOffsets(tableAddress)) && !exitLoop) { - val newAddress = globalOffsets.getOrElse(tableAddress, tableAddress) + while (mmm.relocatedDataRegion(tableAddress.start).isDefined && mmm.relocatedDataRegion(mmm.relocatedDataRegion(tableAddress.start).get.start).isDefined && !exitLoop) { + val newAddress = mmm.relocatedDataRegion(tableAddress.start).getOrElse(tableAddress) if (newAddress == tableAddress) { exitLoop = true } else { @@ -64,7 +71,7 @@ trait GlobalRegionAnalysis(val program: Program, tableAddress } - def tryCoerceIntoData(exp: Expr, n: Command, subAccess: BigInt): Set[DataRegion] = { + def tryCoerceIntoData(exp: Expr, n: Command, subAccess: BigInt, loadOp: Boolean = false): Set[DataRegion] = { val eval = evaluateExpression(exp, constantProp(n)) if (eval.isDefined) { val region = dataPoolMaster(eval.get.value, subAccess) @@ -82,10 +89,10 @@ trait GlobalRegionAnalysis(val program: Program, case BinaryExpr(op, arg1, arg2) => val evalArg2 = evaluateExpression(arg2, constantProp(n)) if (evalArg2.isDefined) { - val firstArg = tryCoerceIntoData(arg1, n, subAccess) + val firstArg = tryCoerceIntoData(arg1, n, subAccess, true) var regions = Set.empty[DataRegion] for (i <- firstArg) { - val newExpr = BinaryExpr(op, BitVecLiteral(resolveGlobalOffsetSecondLast(i.start), evalArg2.get.size), evalArg2.get) + val newExpr = BinaryExpr(op, BitVecLiteral(i.start, evalArg2.get.size), evalArg2.get) regions = regions ++ tryCoerceIntoData(newExpr, n, subAccess) } return regions @@ -136,11 +143,13 @@ trait GlobalRegionAnalysis(val program: Program, } } } - collage + collage.map(i => + val 
resolved = resolveGlobalOffsetSecondLast(i) + if !loadOp then mmm.relocatedDataRegion(i.start).getOrElse(i) else resolved) case _ => Set.empty } - def evalMemLoadToGlobal(index: Expr, size: BigInt, n: Command): Set[DataRegion] = { + def evalMemLoadToGlobal(index: Expr, size: BigInt, n: Command, loadOp: Boolean = false): Set[DataRegion] = { val indexValue = evaluateExpression(index, constantProp(n)) if (indexValue.isDefined) { val indexValueBigInt = indexValue.get.value @@ -210,7 +219,7 @@ trait GlobalRegionAnalysis(val program: Program, case assign: Assign => val unwrapped = unwrapExpr(assign.rhs) if (unwrapped.isDefined) { - return checkIfDefined(evalMemLoadToGlobal(unwrapped.get.index, unwrapped.get.size, cmd), n) + return checkIfDefined(evalMemLoadToGlobal(unwrapped.get.index, unwrapped.get.size, cmd, loadOp = true), n) } else { // this is a constant but we need to check if it is a data region return checkIfDefined(evalMemLoadToGlobal(assign.rhs, 1, cmd), n) @@ -231,9 +240,8 @@ class GlobalRegionAnalysisSolver( constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], mmm: MemoryModelMap, - globalOffsets: Map[BigInt, BigInt], vsaResult: Option[Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]] - ) extends GlobalRegionAnalysis(program, domain, constantProp, reachingDefs, mmm, globalOffsets, vsaResult) + ) extends GlobalRegionAnalysis(program, domain, constantProp, reachingDefs, mmm, vsaResult) with IRIntraproceduralForwardDependencies with Analysis[Map[CFGPosition, Set[DataRegion]]] with SimpleWorklistFixpointSolver[CFGPosition, Set[DataRegion], PowersetLattice[DataRegion]] \ No newline at end of file diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index 31ed7781e..88abee41a 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -22,7 +22,7 @@ case class RangeKey(start: BigInt, end: BigInt) extends Ordered[RangeKey]: // Custom data structure for storing range-to-object mappings -class MemoryModelMap { +class MemoryModelMap(val globalOffsets: Map[BigInt, BigInt]) { private val MAX_BIGINT: BigInt = BigInt(Long.MaxValue) private val contextStack = mutable.Stack.empty[String] private val sharedContextStack = mutable.Stack.empty[List[StackRegion]] @@ -38,6 +38,7 @@ class MemoryModelMap { private val cfgPositionToDataRegion: mutable.Map[CFGPosition, Set[DataRegion]] = mutable.Map() private val heapCalls: mutable.Map[DirectCall, HeapRegion] = mutable.Map() private val mergedRegions: mutable.Map[Set[MemoryRegion], String] = mutable.Map() + private var relocatedAddressesMap: Map[BigInt, DataRegion] = Map() private val stackAllocationSites: mutable.Map[CFGPosition, Set[StackRegion]] = mutable.Map() @@ -158,43 +159,38 @@ class MemoryModelMap { } } - /** - * For DataRegions, the actual address used needs to be converted to the relocated address. - * This is because when regions are found, the relocated address is used and as such match - * the correct range. 
- * - * @param address - * @param globalOffsets - * @return BitVector: a BitVector representing the actual address - */ - private def resolveInverseGlobalOffset(address: BigInt, globalOffsets: Map[BigInt, BigInt]): BigInt = { - val inverseGlobalOffsets = globalOffsets.map(_.swap) - var tableAddress = inverseGlobalOffsets.getOrElse(address, address) - // addresses may be layered as in jumptable2 example for which recursive search is required - var exitLoop = false - while (inverseGlobalOffsets.contains(tableAddress) && !exitLoop) { - val newAddress = inverseGlobalOffsets.getOrElse(tableAddress, tableAddress) - if (newAddress == tableAddress) { - exitLoop = true - } else { - tableAddress = newAddress - } - } - tableAddress + private var relocCount: Int = 0 + private def nextRelocCount() = { + relocCount += 1 + s"reloc_$relocCount" } - def preLoadGlobals(externalFunctions: Map[BigInt, String], globalOffsets: Map[BigInt, BigInt], globalAddresses: Map[BigInt, String], globalSizes: Map[String, Int]): Unit = { + // size of pointer is 8 bytes + val SIZE_OF_POINTER = 8 + + def preLoadGlobals(externalFunctions: Map[BigInt, String], globalAddresses: Map[BigInt, String], globalSizes: Map[String, Int]): Unit = { + val relocRegions = globalOffsets.map((offset, _) => DataRegion(nextRelocCount(), offset, SIZE_OF_POINTER)) + // map externalFunctions name, value to DataRegion(name, value) and then sort by value - val reversedExternalFunctionRgns = externalFunctions.map((offset, name) => resolveInverseGlobalOffset(offset, globalOffsets) -> name) - val filteredGlobalOffsets = globalAddresses.filterNot((offset, name) => reversedExternalFunctionRgns.contains(offset)) + val filteredGlobalOffsets = globalAddresses.filterNot((offset, name) => externalFunctions.contains(offset)) - val externalFunctionRgns = (reversedExternalFunctionRgns ++ filteredGlobalOffsets).map((offset, name) => DataRegion(name, offset, (globalSizes.getOrElse(name, 1).toDouble / 8).ceil.toInt)) + val externalFunctionRgns = (externalFunctions ++ filteredGlobalOffsets).map((offset, name) => DataRegion(name, offset, (globalSizes.getOrElse(name, 1).toDouble / 8).ceil.toInt)) // add externalFunctionRgn to dataRgns and sort by value - val allDataRgns = externalFunctionRgns.toList.sortBy(_.start) + val allDataRgns = (externalFunctionRgns ++ relocRegions).toList.sortBy(_.start) for (dataRgn <- allDataRgns) { add(dataRgn.start, dataRgn) } + + // cannot fail to find any regions here + relocatedAddressesMap = globalOffsets.map((offset, offset2) => { + val newRegion = findDataObject(offset2).get + (offset, newRegion) + }) + } + + def relocatedDataRegion(value: BigInt): Option[DataRegion] = { + relocatedAddressesMap.get(value) } def convertMemoryRegions(stackRegionsPerProcedure: mutable.Map[Procedure, mutable.Set[StackRegion]], heapRegions: mutable.Map[DirectCall, HeapRegion], mergeRegions: mutable.Set[Set[MemoryRegion]], allocationSites: Map[CFGPosition, Set[StackRegion]], procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]], graRegions: mutable.HashMap[BigInt, DataRegion], graResults: Map[CFGPosition, Set[DataRegion]]): Unit = { @@ -209,9 +205,12 @@ class MemoryModelMap { if (obj.isEmpty) { Logger.debug(s"Data region $dr not found in the new data map") } else { - val address = dr.start - val size = dr.size - obj.get.relfContent.add(dr.regionIdentifier) + val isRelocated = relocatedDataRegion(dr.start) + if (isRelocated.isDefined) { + obj.get.relfContent.add(isRelocated.get.regionIdentifier) + } else { + 
obj.get.relfContent.add(dr.regionIdentifier) + } } } @@ -574,18 +573,19 @@ trait MemoryRegion { val subAccesses: mutable.Set[BigInt] = mutable.Set() } -case class StackRegion(override val regionIdentifier: String, start: BigInt, parent: Procedure) extends MemoryRegion { +case class StackRegion(override val regionIdentifier: String, override val start: BigInt, parent: Procedure) extends MemoryRegion { override def toString: String = s"Stack($regionIdentifier, $start, ${parent.name}, $subAccesses)" } -case class HeapRegion(override val regionIdentifier: String, start: BigInt, size: BigInt, parent: Procedure) extends MemoryRegion { +case class HeapRegion(override val regionIdentifier: String, override val start: BigInt, size: BigInt, parent: Procedure) extends MemoryRegion { override def toString: String = s"Heap($regionIdentifier, $size)" } -case class DataRegion(override val regionIdentifier: String, start: BigInt, size: BigInt) extends MemoryRegion { +case class DataRegion(override val regionIdentifier: String, override val start: BigInt, size: BigInt) extends MemoryRegion { override def toString: String = s"Data($regionIdentifier, $start, $size, ($relfContent))" def end: BigInt = start + size - 1 val relfContent: mutable.Set[String] = mutable.Set[String]() + val isPointerTo: Option[DataRegion] = None } class UnionFind { diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 7bfad8e3b..ac00eb663 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -30,6 +30,7 @@ import java.util.Base64 import spray.json.DefaultJsonProtocol.* import util.intrusive_list.IntrusiveList import cilvisitor.* +import util.StaticAnalysis.printAnalysisResults import scala.annotation.tailrec import scala.collection.mutable @@ -362,8 +363,8 @@ object StaticAnalysis { ) }) - val mmm = MemoryModelMap() - mmm.preLoadGlobals(mergedSubroutines, globalOffsets, globalAddresses, globalSizes) + val mmm = MemoryModelMap(globalOffsets) + mmm.preLoadGlobals(mergedSubroutines, globalAddresses, globalSizes) var previousVSAResults = Option.empty[Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]] if (previousResults.isDefined) { @@ -371,7 +372,7 @@ object StaticAnalysis { } Logger.debug("[!] Running GRA") - val graSolver = GlobalRegionAnalysisSolver(IRProgram, domain.toSet, constPropResult, reachingDefinitionsAnalysisResults, mmm, globalOffsets, previousVSAResults) + val graSolver = GlobalRegionAnalysisSolver(IRProgram, domain.toSet, constPropResult, reachingDefinitionsAnalysisResults, mmm, previousVSAResults) val graResult = graSolver.analyze() Logger.debug("[!] Running MRA") @@ -587,6 +588,8 @@ object RunUtils { transforms.splitThreads(ctx.program, analysisResult.last.steensgaardResults, analysisResult.last.memoryRegionContents, analysisResult.last.reachingDefs) } + writeToFile(ctx.program.toString(), s"AfterAnalysis.txt") + assert(invariant.singleCallBlockEnd(ctx.program)) Logger.debug(s"[!] 
Finished indirect call resolution after $iteration iterations") analysisResult.last From 937cfc2dd85e6b83bedb433014319bb1e4dbdd5d Mon Sep 17 00:00:00 2001 From: l-kent Date: Wed, 30 Oct 2024 09:50:01 +1000 Subject: [PATCH 096/104] general clean up --- src/main/scala/analysis/Analysis.scala | 75 ++-- .../scala/analysis/GlobalRegionAnalysis.scala | 206 +++++------ .../InterprocSteensgaardAnalysis.scala | 204 ++++------- src/main/scala/analysis/MemoryModelMap.scala | 262 ++------------ .../scala/analysis/MemoryRegionAnalysis.scala | 261 ++++++-------- src/main/scala/analysis/ReachingDefs.scala | 14 +- src/main/scala/analysis/RegionInjector.scala | 10 +- src/main/scala/analysis/UtilMethods.scala | 19 - src/main/scala/analysis/VSA.scala | 95 ++--- src/main/scala/ir/Program.scala | 4 +- .../transforms/IndirectCallResolution.scala | 332 ++++++------------ .../scala/ir/transforms/SplitThreads.scala | 79 ++--- src/main/scala/util/RunUtils.scala | 42 +-- 13 files changed, 533 insertions(+), 1070 deletions(-) diff --git a/src/main/scala/analysis/Analysis.scala b/src/main/scala/analysis/Analysis.scala index 035a99e11..969bbc2e3 100644 --- a/src/main/scala/analysis/Analysis.scala +++ b/src/main/scala/analysis/Analysis.scala @@ -22,7 +22,7 @@ trait Analysis[+R]: /** Base class for value analysis with simple (non-lifted) lattice. */ -trait ConstantPropagation(val program: Program, val assumeR31: Boolean) { +trait ConstantPropagation(val program: Program) { /** The lattice of abstract states. */ @@ -32,9 +32,9 @@ trait ConstantPropagation(val program: Program, val assumeR31: Boolean) { /** Default implementation of eval. */ - def eval(exp: Expr, env: Map[Variable, FlatElement[BitVecLiteral]]): FlatElement[BitVecLiteral] = + def eval(exp: Expr, env: Map[Variable, FlatElement[BitVecLiteral]]): FlatElement[BitVecLiteral] = { import valuelattice._ - exp match + exp match { case id: Variable => env(id) case n: BitVecLiteral => bv(n) case ze: ZeroExtend => zero_extend(ze.extension, eval(ze.body, env)) @@ -43,7 +43,7 @@ trait ConstantPropagation(val program: Program, val assumeR31: Boolean) { case bin: BinaryExpr => val left = eval(bin.arg1, env) val right = eval(bin.arg2, env) - bin.op match + bin.op match { case BVADD => bvadd(left, right) case BVSUB => bvsub(left, right) case BVMUL => bvmul(left, right) @@ -63,29 +63,29 @@ trait ConstantPropagation(val program: Program, val assumeR31: Boolean) { case BVASHR => bvashr(left, right) case BVCOMP => bvcomp(left, right) case BVCONCAT => concat(left, right) - + } case un: UnaryExpr => val arg = eval(un.arg, env) - - un.op match + un.op match { case BVNOT => bvnot(arg) case BVNEG => bvneg(arg) - + } case _ => valuelattice.top + } + } + /** Transfer function for state lattice elements. */ - def localTransfer(n: CFGPosition, s: Map[Variable, FlatElement[BitVecLiteral]]): Map[Variable, FlatElement[BitVecLiteral]] = - var m = s - n match - case r: Command => - r match - // assignments - case la: Assign => - m + (la.lhs -> eval(la.rhs, m)) - // all others: like no-ops - case _ => m - case _ => m + def localTransfer(n: CFGPosition, s: Map[Variable, FlatElement[BitVecLiteral]]): Map[Variable, FlatElement[BitVecLiteral]] = { + n match { + // assignments + case la: Assign => + s + (la.lhs -> eval(la.rhs, s)) + // all others: like no-ops + case _ => s + } + } /** The analysis lattice. 
*/ @@ -98,7 +98,7 @@ trait ConstantPropagation(val program: Program, val assumeR31: Boolean) { def transfer(n: CFGPosition, s: Map[Variable, FlatElement[BitVecLiteral]]): Map[Variable, FlatElement[BitVecLiteral]] = localTransfer(n, s) } -class ConstantPropagationSolver(program: Program, assumeR31: Boolean = false) extends ConstantPropagation(program, assumeR31) +class ConstantPropagationSolver(program: Program) extends ConstantPropagation(program) with SimplePushDownWorklistFixpointSolver[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]], MapLattice[Variable, FlatElement[BitVecLiteral], ConstantPropagationLattice]] with IRInterproceduralForwardDependencies with Analysis[Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]]] @@ -116,9 +116,9 @@ trait ConstantPropagationWithSSA(val program: Program, val reachingDefs: Map[CFG /** Default implementation of eval. */ - def eval(exp: Expr, env: Map[RegisterWrapperEqualSets, Set[BitVecLiteral]], n: CFGPosition): Set[BitVecLiteral] = + def eval(exp: Expr, env: Map[RegisterWrapperEqualSets, Set[BitVecLiteral]], n: CFGPosition): Set[BitVecLiteral] = { import valuelattice._ - exp match + exp match { case id: Variable => env(RegisterWrapperEqualSets(id, getUse(id, n, reachingDefs))) case n: BitVecLiteral => bv(n) case ze: ZeroExtend => zero_extend(ze.extension, eval(ze.body, env, n)) @@ -127,7 +127,7 @@ trait ConstantPropagationWithSSA(val program: Program, val reachingDefs: Map[CFG case bin: BinaryExpr => val left = eval(bin.arg1, env, n) val right = eval(bin.arg2, env, n) - bin.op match + bin.op match { case BVADD => bvadd(left, right) case BVSUB => bvsub(left, right) case BVMUL => bvmul(left, right) @@ -147,34 +147,31 @@ trait ConstantPropagationWithSSA(val program: Program, val reachingDefs: Map[CFG case BVASHR => bvashr(left, right) case BVCOMP => bvcomp(left, right) case BVCONCAT => concat(left, right) + } case un: UnaryExpr => val arg = eval(un.arg, env, n) - - un.op match + un.op match { case BVNOT => bvnot(arg) case BVNEG => bvneg(arg) + } case _ => Set.empty + } + } /** Transfer function for state lattice elements. 
*/ def localTransfer(n: CFGPosition, s: Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]): Map[RegisterWrapperEqualSets, Set[BitVecLiteral]] = n match { - case r: Command => - r match { - // assignments - case a: Assign => - val lhsWrappers = s.collect { - case (k, v) if RegisterVariableWrapper(k.variable, k.assigns) == RegisterVariableWrapper(a.lhs, getDefinition(a.lhs, r, reachingDefs)) => (k, v) - } - if (lhsWrappers.nonEmpty) { - s ++ lhsWrappers.map((k, v) => (k, v.union(eval(a.rhs, s, r)))) - } else { - s + (RegisterWrapperEqualSets(a.lhs, getDefinition(a.lhs, r, reachingDefs)) -> eval(a.rhs, s, n)) - } - // all others: like no-ops - case _ => s + case a: Assign => + val lhsWrappers = s.collect { + case (k, v) if RegisterVariableWrapper(k.variable, k.assigns) == RegisterVariableWrapper(a.lhs, getDefinition(a.lhs, a, reachingDefs)) => (k, v) + } + if (lhsWrappers.nonEmpty) { + s ++ lhsWrappers.map((k, v) => (k, v.union(eval(a.rhs, s, a)))) + } else { + s + (RegisterWrapperEqualSets(a.lhs, getDefinition(a.lhs, a, reachingDefs)) -> eval(a.rhs, s, n)) } case _ => s } diff --git a/src/main/scala/analysis/GlobalRegionAnalysis.scala b/src/main/scala/analysis/GlobalRegionAnalysis.scala index 93e531e36..47b6b8ddb 100644 --- a/src/main/scala/analysis/GlobalRegionAnalysis.scala +++ b/src/main/scala/analysis/GlobalRegionAnalysis.scala @@ -10,9 +10,9 @@ trait GlobalRegionAnalysis(val program: Program, val constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], val reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], val mmm: MemoryModelMap, - val vsaResult: Option[Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]]) { + val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]) { - var dataCount: Int = 0 + private var dataCount: Int = 0 private def nextDataCount() = { dataCount += 1 s"data_$dataCount" @@ -22,27 +22,22 @@ trait GlobalRegionAnalysis(val program: Program, val lattice: MapLattice[CFGPosition, Set[DataRegion], PowersetLattice[DataRegion]] = MapLattice(regionLattice) - val first: Set[CFGPosition] = Set.empty + program.mainProcedure - - private val stackPointer = Register("R31", 64) - private val linkRegister = Register("R30", 64) - private val framePointer = Register("R29", 64) - private val mallocVariable = Register("R0", 64) + val first: Set[CFGPosition] = Set(program.mainProcedure) private val dataMap: mutable.HashMap[BigInt, DataRegion] = mutable.HashMap() - private def dataPoolMaster(offset: BigInt, size: BigInt): Option[DataRegion] = { + private def dataPoolMaster(offset: BigInt, size: BigInt): DataRegion = { assert(size >= 0) if (dataMap.contains(offset)) { if (dataMap(offset).size < (size.toDouble / 8).ceil.toInt) { dataMap(offset) = DataRegion(dataMap(offset).regionIdentifier, offset, (size.toDouble / 8).ceil.toInt) - Some(dataMap(offset)) + dataMap(offset) } else { - Some(dataMap(offset)) + dataMap(offset) } } else { dataMap(offset) = DataRegion(nextDataCount(), offset, (size.toDouble / 8).ceil.toInt) - Some(dataMap(offset)) + dataMap(offset) } } @@ -74,105 +69,74 @@ trait GlobalRegionAnalysis(val program: Program, def tryCoerceIntoData(exp: Expr, n: Command, subAccess: BigInt, loadOp: Boolean = false): Set[DataRegion] = { val eval = evaluateExpression(exp, constantProp(n)) if (eval.isDefined) { - val region = dataPoolMaster(eval.get.value, subAccess) - if (region.isDefined) { - return Set(region.get) - } - } - exp match - case literal: BitVecLiteral => 
tryCoerceIntoData(literal, n, subAccess) - case Extract(end, start, body) => tryCoerceIntoData(body, n, subAccess) - case Repeat(repeats, body) => tryCoerceIntoData(body, n, subAccess) - case ZeroExtend(extension, body) => tryCoerceIntoData(body, n, subAccess) - case SignExtend(extension, body) => tryCoerceIntoData(body, n, subAccess) - case UnaryExpr(op, arg) => tryCoerceIntoData(arg, n, subAccess) - case BinaryExpr(op, arg1, arg2) => - val evalArg2 = evaluateExpression(arg2, constantProp(n)) - if (evalArg2.isDefined) { - val firstArg = tryCoerceIntoData(arg1, n, subAccess, true) - var regions = Set.empty[DataRegion] - for (i <- firstArg) { - val newExpr = BinaryExpr(op, BitVecLiteral(i.start, evalArg2.get.size), evalArg2.get) - regions = regions ++ tryCoerceIntoData(newExpr, n, subAccess) - } - return regions - } - Set.empty - case MemoryLoad(mem, index, endian, size) => ??? - case UninterpretedFunction(name, params, returnType) => Set.empty - case variable: Variable => - val ctx = getUse(variable, n, reachingDefs) - var collage = Set.empty[DataRegion] - for (i <- ctx) { - if (i != n) { - var tryVisit = Set.empty[DataRegion] - if (vsaResult.isDefined) { - vsaResult.get.get(i) match - case Some(value) => value match - case Lift(el) => el.get(i.lhs) match - case Some(value) => value.map { - case addressValue: AddressValue => - // find what the region contains - vsaResult.get.get(i) match - case Some(value) => value match - case Lift(el) => el.get(addressValue.region) match - case Some(value) => value.map { - case addressValue: AddressValue => - addressValue.region match - case region: DataRegion => - tryVisit = tryVisit + region - case _ => - case literalValue: LiteralValue => - } - case None => - case LiftedBottom => - case _ => - case None => - case literalValue: LiteralValue => - } - case None => - case LiftedBottom => - case _ => - case None => + Set(dataPoolMaster(eval.get.value, subAccess)) + } else { + exp match { + case literal: BitVecLiteral => tryCoerceIntoData(literal, n, subAccess) + case Extract(_, _, body) => tryCoerceIntoData(body, n, subAccess) + case Repeat(_, body) => tryCoerceIntoData(body, n, subAccess) + case ZeroExtend(_, body) => tryCoerceIntoData(body, n, subAccess) + case SignExtend(_, body) => tryCoerceIntoData(body, n, subAccess) + case UnaryExpr(_, arg) => tryCoerceIntoData(arg, n, subAccess) + case BinaryExpr(op, arg1, arg2) => + val evalArg2 = evaluateExpression(arg2, constantProp(n)) + if (evalArg2.isDefined) { + tryCoerceIntoData(arg1, n, subAccess, true) flatMap { i => + val newExpr = BinaryExpr(op, BitVecLiteral(i.start, evalArg2.get.size), evalArg2.get) + tryCoerceIntoData(newExpr, n, subAccess) } - if (tryVisit.isEmpty) { - tryVisit = localTransfer(i, Set.empty) + } else { + Set() + } + case _: MemoryLoad => ??? 
+ case _: UninterpretedFunction => Set.empty + case variable: Variable => + val ctx = getUse(variable, n, reachingDefs) + val collage = ctx.flatMap { i => + if (i != n) { + val regions: Set[DataRegion] = vsaResult.get(i) match { + case Some(Lift(el)) => + el.getOrElse(i.lhs, Set()).flatMap { + case AddressValue(region) => + el.getOrElse(region, Set()).flatMap { + case AddressValue(dataRegion: DataRegion) => Some(dataRegion) + case _ => None + } + case _ => Set() + } + case _ => Set() + } + if (regions.isEmpty) { + localTransfer(i, Set()) + } else { + regions + } + } else { + Set() } - if (tryVisit.nonEmpty) { - collage = collage ++ tryVisit + } + collage.map { i => + if (!loadOp) { + mmm.relocatedDataRegion(i.start).getOrElse(i) + } else { + resolveGlobalOffsetSecondLast(i) } } - } - collage.map(i => - val resolved = resolveGlobalOffsetSecondLast(i) - if !loadOp then mmm.relocatedDataRegion(i.start).getOrElse(i) else resolved) - case _ => Set.empty + case _ => Set() + } + } } def evalMemLoadToGlobal(index: Expr, size: BigInt, n: Command, loadOp: Boolean = false): Set[DataRegion] = { val indexValue = evaluateExpression(index, constantProp(n)) if (indexValue.isDefined) { val indexValueBigInt = indexValue.get.value - val region = dataPoolMaster(indexValueBigInt, size) - if (region.isDefined) { - return Set(region.get) - } + Set(dataPoolMaster(indexValueBigInt, size)) + } else { + tryCoerceIntoData(index, n, size) } - tryCoerceIntoData(index, n, size) } -// def mergeRegions(regions: Set[DataRegion]): DataRegion = { -// if (regions.size == 1) { -// return regions.head -// } -// val start = regions.minBy(_.start).start -// val end = regions.maxBy(_.end).end -// val size = end - start -// val newRegion = DataRegion(nextDataCount(), start, size) -// regions.foreach(i => dataMap(i.start) = newRegion) -// newRegion -// } - /** * Check if the data region is defined. * Finds full and partial matches @@ -185,47 +149,39 @@ trait GlobalRegionAnalysis(val program: Program, * @return Set[DataRegion] */ def checkIfDefined(dataRegions: Set[DataRegion], n: CFGPosition): Set[DataRegion] = { - var returnSet = Set.empty[DataRegion] - for (i <- dataRegions) { + dataRegions.map { i => val (f, p) = mmm.findDataObjectWithSize(i.start, i.size) val accesses = f.union(p) if (accesses.isEmpty) { - returnSet = returnSet + i + i + } else if (accesses.size == 1) { + dataMap(i.start) = DataRegion(i.regionIdentifier, i.start, i.size.max(accesses.head.size)) + dataMap(i.start) } else { - if (accesses.size == 1) { - dataMap(i.start) = DataRegion(i.regionIdentifier, i.start, i.size.max(accesses.head.size)) - returnSet = returnSet + dataMap(i.start) - } else if (accesses.size > 1) { - val highestRegion = accesses.maxBy(_.start) - dataMap(i.start) = DataRegion(i.regionIdentifier, i.start, i.size.max(highestRegion.end - i.start)) - returnSet = returnSet + dataMap(i.start) - } + val highestRegion = accesses.maxBy(_.start) + dataMap(i.start) = DataRegion(i.regionIdentifier, i.start, i.size.max(highestRegion.end - i.start)) + dataMap(i.start) } } - returnSet } /** Transfer function for state lattice elements. 
*/ def localTransfer(n: CFGPosition, s: Set[DataRegion]): Set[DataRegion] = { n match { - case cmd: Command => - cmd match { - case memAssign: MemoryAssign => - return checkIfDefined(evalMemLoadToGlobal(memAssign.index, memAssign.size, cmd), n) - case assign: Assign => - val unwrapped = unwrapExpr(assign.rhs) - if (unwrapped.isDefined) { - return checkIfDefined(evalMemLoadToGlobal(unwrapped.get.index, unwrapped.get.size, cmd, loadOp = true), n) - } else { - // this is a constant but we need to check if it is a data region - return checkIfDefined(evalMemLoadToGlobal(assign.rhs, 1, cmd), n) - } - case _ => + case memAssign: MemoryAssign => + checkIfDefined(evalMemLoadToGlobal(memAssign.index, memAssign.size, memAssign), n) + case assign: Assign => + val unwrapped = unwrapExpr(assign.rhs) + if (unwrapped.isDefined) { + checkIfDefined(evalMemLoadToGlobal(unwrapped.get.index, unwrapped.get.size, assign, loadOp = true), n) + } else { + // this is a constant but we need to check if it is a data region + checkIfDefined(evalMemLoadToGlobal(assign.rhs, 1, assign), n) } case _ => + Set() } - Set.empty } def transfer(n: CFGPosition, s: Set[DataRegion]): Set[DataRegion] = localTransfer(n, s) @@ -237,7 +193,7 @@ class GlobalRegionAnalysisSolver( constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], mmm: MemoryModelMap, - vsaResult: Option[Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]] + vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] ) extends GlobalRegionAnalysis(program, domain, constantProp, reachingDefs, mmm, vsaResult) with IRIntraproceduralForwardDependencies with Analysis[Map[CFGPosition, Set[DataRegion]]] diff --git a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala index d8e0155bf..c05f83d60 100644 --- a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala +++ b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala @@ -22,116 +22,50 @@ case class RegisterVariableWrapper(variable: Variable, assigns: Set[Assign]) { /** Wrapper for variables so we can have ConstantPropegation-specific equals method indirectly * Relies on SSA sets being exactly the same * */ -case class RegisterWrapperEqualSets(variable: Variable, assigns: Set[Assign]) { - override def equals(obj: Any): Boolean = { - obj match { - case RegisterWrapperEqualSets(other, otherAssigns) => - variable == other && assigns == otherAssigns - case _ => - false - } - } -} +case class RegisterWrapperEqualSets(variable: Variable, assigns: Set[Assign]) /** Steensgaard-style pointer analysis. The analysis associates an [[StTerm]] with each variable declaration and * expression node in the AST. It is implemented using [[analysis.solvers.UnionFindSolver]]. 
*/ class InterprocSteensgaardAnalysis( domain: Set[CFGPosition], - constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], mmm: MemoryModelMap, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], - globalOffsets: Map[BigInt, BigInt], - vsaResult: Option[Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]]) extends Analysis[Any] { + vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]) extends Analysis[Any] { val solver: UnionFindSolver[StTerm] = UnionFindSolver() - private val stackPointer = Register("R31", 64) - private val linkRegister = Register("R30", 64) - private val framePointer = Register("R29", 64) - private val ignoreRegions: Set[Expr] = Set(linkRegister, framePointer) private val mallocVariable = Register("R0", 64) - var mallocCount: Int = 0 - var stackCount: Int = 0 - val stackMap: mutable.Map[Expr, StackRegion] = mutable.Map() - - private val memoryRegionContents: mutable.Map[MemoryRegion, mutable.Set[BitVecLiteral | MemoryRegion]] = mutable.Map() - - def getMemoryRegionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]] = memoryRegionContents.map((k, v) => k -> v.toSet).toMap - - /** - * In expressions that have accesses within a region, we need to relocate - * the base address to the actual address using the relocation table. - * MUST RELOCATE because MMM iterate to find the lowest address - * TODO: May need to iterate over the relocation table to find the actual address - * - * @param address - * @return BitVecLiteral: the relocated address - */ - def relocatedBase(address: BigInt): BitVecLiteral = { - val tableAddress = globalOffsets.getOrElse(address, address) - // this condition checks if the address is not layered and returns if it is not - if (tableAddress != address && !globalOffsets.contains(tableAddress)) { - BitVecLiteral(address, 64) - } else { - BitVecLiteral(tableAddress, 64) - } - } - - def nodeToRegion(n: CFGPosition): Set[MemoryRegion] = { - var returnRegions = Set.empty[MemoryRegion] - n match { - case directCall: DirectCall => - returnRegions = returnRegions + mmm.getHeap(directCall).asInstanceOf[MemoryRegion] - case _ => - returnRegions = returnRegions ++ mmm.getStack(n).asInstanceOf[Set[MemoryRegion]] ++ mmm.getData(n).asInstanceOf[Set[MemoryRegion]] - } - returnRegions - } - - def canCoerceIntoDataRegion(bitVecLiteral: BitVecLiteral, size: Int): Option[DataRegion] = { - mmm.findDataObject(bitVecLiteral.value) - } - def vsaApproximation(variable: Variable, n: CFGPosition): Set[MemoryRegion] = { val ctx = getUse(variable, n, reachingDefs) - var collage = Set.empty[MemoryRegion] - for (i <- ctx) { + ctx.flatMap { i => if (i != n) { - var tryVisit = Set.empty[MemoryRegion] - if (vsaResult.isDefined) { - vsaResult.get.get(i) match - case Some(value) => value match - case Lift(el) => el.get(i.lhs) match - case Some(value) => value.foreach { - case addressValue: AddressValue => - tryVisit = tryVisit + addressValue.region - case literalValue: LiteralValue => - } - case None => - case LiftedBottom => - case _ => - case None => - } -// if (tryVisit.isEmpty) { -// tryVisit = localTransfer(i, Set.empty) -// } - if (tryVisit.nonEmpty) { - collage = collage ++ tryVisit + vsaResult.get(i) match { + case Some(Lift(el)) => el.get(i.lhs) match { + case Some(values) => values.flatMap { + case addressValue: AddressValue => + Some(addressValue.region) + case _: LiteralValue => None + } + case None => Set() + } + case _ => Set() } + } else { + Set() } } - 
collage } /** @inheritdoc */ - def analyze(): Unit = + def analyze(): Unit = { // generate the constraints by traversing the AST and solve them on-the-fly - domain.foreach(p => { + domain.foreach { p => visit(p, ()) - }) + } + } /** Generates the constraints for the given sub-AST. * @param node @@ -141,53 +75,47 @@ class InterprocSteensgaardAnalysis( */ def visit(node: CFGPosition, arg: Unit): Unit = { node match { - case cmd: Command => - cmd match { - case directCall: DirectCall if directCall.target.name == "malloc" => - // X = alloc P: [[X]] = ↑[[alloc-i]] - val alloc = nodeToRegion(cmd).head - val defs = getDefinition(mallocVariable, cmd, reachingDefs) - unify(IdentifierVariable(RegisterWrapperEqualSets(mallocVariable, defs)), PointerRef(AllocVariable(alloc))) - case assign: Assign => - val unwrapped = unwrapExprToVar(assign.rhs) - if (unwrapped.isDefined) { - // X1 = X2: [[X1]] = [[X2]] - val X1 = assign.lhs - val X2 = unwrapped.get - unify(IdentifierVariable(RegisterWrapperEqualSets(X1, getDefinition(X1, cmd, reachingDefs))), IdentifierVariable(RegisterWrapperEqualSets(X2, getUse(X2, cmd, reachingDefs)))) - } else { - // X1 = *X2: [[X2]] = ↑a ^ [[X1]] = a where a is a fresh term variable - val X1 = assign.lhs - val X2_star = nodeToRegion(node) - val alpha = FreshVariable() - X2_star.foreach( - x => - unify(PointerRef(alpha), ExpressionVariable(x)) - ) - unify(IdentifierVariable(RegisterWrapperEqualSets(X1, getDefinition(X1, cmd, reachingDefs))), alpha) - } - case memoryAssign: MemoryAssign => - // *X1 = X2: [[X1]] = ↑a ^ [[X2]] = a where a is a fresh term variable - val X1_star = nodeToRegion(node) - // TODO: This is risky as it tries to coerce every value to a region (needed for functionpointer example) - val unwrapped = unwrapExprToVar(memoryAssign.value) - if (unwrapped.isDefined) { - val X2 = unwrapped.get - val X2_regions: Set[MemoryRegion] = vsaApproximation(X2, node) - - val alpha = FreshVariable() - val pointerRef = PointerRef(alpha) - X1_star.foreach(x => - unify(ExpressionVariable(x), pointerRef) - ) - X2_regions.foreach( - x => - unify(ExpressionVariable(x), alpha) - ) - } - case _ => // do nothing TODO: Maybe LocalVar too? 
+ case directCall: DirectCall if directCall.target.name == "malloc" => + // X = alloc P: [[X]] = ↑[[alloc-i]] + val alloc = mmm.nodeToRegion(directCall).head + val defs = getDefinition(mallocVariable, directCall, reachingDefs) + unify(IdentifierVariable(RegisterWrapperEqualSets(mallocVariable, defs)), PointerRef(AllocVariable(alloc))) + case assign: Assign => + val unwrapped = unwrapExprToVar(assign.rhs) + if (unwrapped.isDefined) { + // X1 = X2: [[X1]] = [[X2]] + val X1 = assign.lhs + val X2 = unwrapped.get + unify(IdentifierVariable(RegisterWrapperEqualSets(X1, getDefinition(X1, assign, reachingDefs))), IdentifierVariable(RegisterWrapperEqualSets(X2, getUse(X2, assign, reachingDefs)))) + } else { + // X1 = *X2: [[X2]] = ↑a ^ [[X1]] = a where a is a fresh term variable + val X1 = assign.lhs + val X2_star = mmm.nodeToRegion(node) + val alpha = FreshVariable() + X2_star.foreach { x => + unify(PointerRef(alpha), ExpressionVariable(x)) + } + unify(IdentifierVariable(RegisterWrapperEqualSets(X1, getDefinition(X1, assign, reachingDefs))), alpha) } - case _ => + case memoryAssign: MemoryAssign => + // *X1 = X2: [[X1]] = ↑a ^ [[X2]] = a where a is a fresh term variable + val X1_star = mmm.nodeToRegion(node) + // TODO: This is risky as it tries to coerce every value to a region (needed for functionpointer example) + val unwrapped = unwrapExprToVar(memoryAssign.value) + if (unwrapped.isDefined) { + val X2 = unwrapped.get + val X2_regions: Set[MemoryRegion] = vsaApproximation(X2, node) + + val alpha = FreshVariable() + val pointerRef = PointerRef(alpha) + X1_star.foreach { x => + unify(ExpressionVariable(x), pointerRef) + } + X2_regions.foreach { x => + unify(ExpressionVariable(x), alpha) + } + } + case _ => // do nothing TODO: Maybe LocalVar too? } } @@ -214,19 +142,9 @@ class InterprocSteensgaardAnalysis( }.toSet a + (v.id -> pt) } - Logger.debug(s"\nPoints-to:\n${pointsto.map(p => s"${p._1} -> { ${p._2.mkString(",")} }").mkString("\n")}\n") + Logger.debug(s"\nPoints-to:\n${pointsto.map((k, v) => s"$k -> { ${v.mkString(",")} }").mkString("\n")}\n") pointsto } - - /** @inheritdoc - */ - def mayAlias(): (RegisterWrapperEqualSets, RegisterWrapperEqualSets) => Boolean = { - val solution = solver.solution() - (id1: RegisterWrapperEqualSets, id2: RegisterWrapperEqualSets) => - val sol1 = solution(IdentifierVariable(id1)) - val sol2 = solution(IdentifierVariable(id2)) - sol1 == sol2 && sol1.isInstanceOf[PointerRef] // same equivalence class, and it contains a reference - } } /** Terms used in unification. 
@@ -261,7 +179,7 @@ case class FreshVariable(id: Int) extends StTerm with Var[StTerm] { } object FreshVariable { - var n = 0 + private var n = 0 def next(): Int = { n += 1 diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index 1d1585a1c..94f7b78a4 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -7,9 +7,6 @@ import util.Logger import scala.collection.immutable.TreeMap import scala.collection.mutable -enum MemoryType: - case Data, Heap, Stack - // Define a case class to represent a range case class RangeKey(start: BigInt, end: BigInt) extends Ordered[RangeKey]: val size: BigInt = end - start + 1 @@ -20,10 +17,8 @@ case class RangeKey(start: BigInt, end: BigInt) extends Ordered[RangeKey]: } override def toString: String = s"Range[$start, $end] (size: $size)" - // Custom data structure for storing range-to-object mappings class MemoryModelMap(val globalOffsets: Map[BigInt, BigInt]) { - private val MAX_BIGINT: BigInt = BigInt(Long.MaxValue) private val contextStack = mutable.Stack.empty[String] private val sharedContextStack = mutable.Stack.empty[List[StackRegion]] private val localStacks = mutable.Map[String, List[StackRegion]]().withDefaultValue(List.empty) @@ -41,52 +36,7 @@ class MemoryModelMap(val globalOffsets: Map[BigInt, BigInt]) { private val stackAllocationSites: mutable.Map[CFGPosition, Set[StackRegion]] = mutable.Map() - private val uf = new UnionFind() - private var DataMemory, HeapMemory, StackMemory = TreeMap[BigInt, Array[Byte]]() - - - - // Store operation: store BigInt value at a BigInt address - def store(address: BigInt, value: BigInt, memoryType: MemoryType): Unit = { - val byteArray = value.toByteArray - memoryType match - case MemoryType.Data => DataMemory += (address -> byteArray) - case MemoryType.Heap => HeapMemory += (address -> byteArray) - case MemoryType.Stack => StackMemory += (address -> byteArray) - } - - // Load operation: load from a BigInt address with a specific size - def load(address: BigInt, size: Int, memoryType: MemoryType): BigInt = { - val memory = memoryType match - case MemoryType.Data => DataMemory - case MemoryType.Heap => HeapMemory - case MemoryType.Stack => StackMemory - // Find the memory block that contains the starting address - val floorEntry = memory.rangeTo(address).lastOption - - floorEntry match { - case Some((startAddress, byteArray)) => - val offset = (address - startAddress).toInt // Offset within the byte array - // If the load exceeds the stored data, we need to handle padding with zeros - if (offset >= byteArray.length) { - BigInt(0) - } else { - // Calculate how much data we can retrieve - val availableSize = byteArray.length - offset - // Slice the available data, and if requested size exceeds, append zeros - val result = byteArray.slice(offset, offset + size) - val paddedResult = if (size > availableSize) { - result ++ Array.fill(size - availableSize)(0.toByte) // Padding with zeros - } else { - result - } - BigInt(1, paddedResult) // Convert the byte array back to BigInt - } - case None => - // If no memory is stored at the requested address, return zero - BigInt(0) // TODO: may need to be sm else - } - } + private val uf = UnionFind() /** Add a range and object to the mapping * @@ -97,10 +47,10 @@ class MemoryModelMap(val globalOffsets: Map[BigInt, BigInt]) { */ def add(offset: BigInt, region: MemoryRegion, shared: Boolean = false): Unit = { def maxSize(r: MemoryRegion): BigInt = { - r match - case 
DataRegion(regionIdentifier, start, size) => start + size - case HeapRegion(regionIdentifier, start, size, parent) => ??? - case StackRegion(regionIdentifier, start, parent) => + r match { + case DataRegion(_, start, size) => start + size + case _: HeapRegion => ??? + case StackRegion(_, start, _) => if (r.subAccesses.nonEmpty) { val max = start + r.subAccesses.max max @@ -108,6 +58,7 @@ class MemoryModelMap(val globalOffsets: Map[BigInt, BigInt]) { ??? } case _ => ??? + } } def regionsOverlap(r1: RangeKey, r2: RangeKey): Boolean = { @@ -152,7 +103,6 @@ class MemoryModelMap(val globalOffsets: Map[BigInt, BigInt]) { currentHeapMap(RangeKey(offset, offset + h.size - 1)) = h } else { val currentMaxRange = currentHeapMap.keys.maxBy(_.end) - val currentMaxRegion = currentHeapMap(currentMaxRange) currentHeapMap(RangeKey(currentMaxRange.start + 1, h.size - 1)) = h } } @@ -165,13 +115,13 @@ class MemoryModelMap(val globalOffsets: Map[BigInt, BigInt]) { } // size of pointer is 8 bytes - val SIZE_OF_POINTER = 8 + private val SIZE_OF_POINTER = 8 def preLoadGlobals(externalFunctions: Map[BigInt, String], globalAddresses: Map[BigInt, String], globalSizes: Map[String, Int]): Unit = { - val relocRegions = globalOffsets.map((offset, _) => DataRegion(nextRelocCount(), offset, SIZE_OF_POINTER)) + val relocRegions = globalOffsets.keys.map(offset => DataRegion(nextRelocCount(), offset, SIZE_OF_POINTER)) // map externalFunctions name, value to DataRegion(name, value) and then sort by value - val filteredGlobalOffsets = globalAddresses.filterNot((offset, name) => externalFunctions.contains(offset)) + val filteredGlobalOffsets = globalAddresses.filterNot((offset, _) => externalFunctions.contains(offset)) val externalFunctionRgns = (externalFunctions ++ filteredGlobalOffsets).map((offset, name) => DataRegion(name, offset, (globalSizes.getOrElse(name, 1).toDouble / 8).ceil.toInt)) @@ -182,21 +132,25 @@ class MemoryModelMap(val globalOffsets: Map[BigInt, BigInt]) { } // cannot fail to find any regions here - relocatedAddressesMap = globalOffsets.map((offset, offset2) => { - val newRegion = findDataObject(offset2).get - (offset, newRegion) - }) + relocatedAddressesMap = globalOffsets.map { (offset, offset2) => + (offset, findDataObject(offset2).get) + } } def relocatedDataRegion(value: BigInt): Option[DataRegion] = { relocatedAddressesMap.get(value) } - def convertMemoryRegions(stackRegionsPerProcedure: mutable.Map[Procedure, mutable.Set[StackRegion]], heapRegions: mutable.Map[DirectCall, HeapRegion], mergeRegions: mutable.Set[Set[MemoryRegion]], allocationSites: Map[CFGPosition, Set[StackRegion]], procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]], graRegions: mutable.HashMap[BigInt, DataRegion], graResults: Map[CFGPosition, Set[DataRegion]]): Unit = { + def convertMemoryRegions(stackRegionsPerProcedure: mutable.Map[Procedure, mutable.Set[StackRegion]], + heapRegions: mutable.Map[DirectCall, HeapRegion], + allocationSites: Map[CFGPosition, Set[StackRegion]], + procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]], + graRegions: mutable.HashMap[BigInt, DataRegion], + graResults: Map[CFGPosition, Set[DataRegion]]): Unit = { //val keepData = dataMap.filterNot((range, region) => graRegions.contains(region.start)).map((range, region) => region) val oldRegions = dataMap.values.toSet dataMap.clear() - for (dr <- graRegions.map((_, dataRegion) => dataRegion)) { + for (dr <- graRegions.values) { add(dr.start, dr) } for (dr <- oldRegions) { @@ -215,15 +169,15 @@ class 
MemoryModelMap(val globalOffsets: Map[BigInt, BigInt]) { cfgPositionToDataRegion ++= graResults stackAllocationSites ++= allocationSites - stackRegionsPerProcedure.keys.foreach(exitNode => - if (procedureToSharedRegions.contains(exitNode)) { - val sharedRegions = procedureToSharedRegions(exitNode) - sharedStacks(exitNode.name) = sharedRegions.collect { case r: StackRegion => r }.toList.sortBy(_.start) + stackRegionsPerProcedure.keys.foreach { proc => + if (procedureToSharedRegions.contains(proc)) { + val sharedRegions = procedureToSharedRegions(proc) + sharedStacks(proc.name) = sharedRegions.collect { case r: StackRegion => r }.toList.sortBy(_.start) } // for each function exit node we get the memory region and add it to the mapping - val stackRgns = stackRegionsPerProcedure(exitNode).toList.sortBy(_.start) - localStacks(exitNode.name) = stackRgns - ) + val stackRgns = stackRegionsPerProcedure(proc).toList.sortBy(_.start) + localStacks(proc.name) = stackRgns + } heapCalls ++= heapRegions // add heap regions @@ -232,11 +186,6 @@ class MemoryModelMap(val globalOffsets: Map[BigInt, BigInt]) { add(rangeStart, heapRegion) } - // merge regions - for (regions <- mergeRegions) { - uf.bulkUnion(regions) - } - /* this is done because the stack regions will change after MMM transforms them and merges some of them based on size, thus we need to alter the results of the analysis to match MMM transformations @@ -302,144 +251,9 @@ class MemoryModelMap(val globalOffsets: Map[BigInt, BigInt]) { } } - /* All regions that either: - * 1. starts at value but size less than region size - * 2. starts at value but size more than region size (add both regions ie. next region) - * 3. starts between regions (start, end) and (value + size) => end - * 4. starts between regions (start, end) and (value + size) < end (add both regions ie. 
next region) - */ - def findStackPartialAccessesOnly(value: BigInt, size: BigInt): Set[StackRegion] = { - val matchingRegions = scala.collection.mutable.Set[StackRegion]() - - stackMap.foreach { case (range, region) => - // Condition 1: Starts at value but size less than region size - if (range.start == value && range.size > size) { - matchingRegions += region - } - // Condition 2: Starts at value but size more than region size (add subsequent regions) - else if (range.start == value && range.size < size) { - matchingRegions += region - var remainingSize = size - range.size - var nextStart = range.end - stackMap.toSeq.sortBy(_._1.start).dropWhile(_._1.start <= range.start).foreach { case (nextRange, nextRegion) => - if (remainingSize > 0) { - matchingRegions += nextRegion - remainingSize -= nextRange.size - nextStart = nextRange.end - } - } - } - // Condition 3: Starts between regions (start, end) and (value + size) => end - else if (range.start < value && (value + size) <= range.end) { - matchingRegions += region - } - // Condition 4: Starts between regions (start, end) and (value + size) < end (add subsequent regions) - else if (range.start < value && (value + size) > range.end) { - matchingRegions += region - var remainingSize = (value + size) - range.end - var nextStart = range.end - stackMap.toSeq.sortBy(_._1.start).dropWhile(_._1.start <= range.start).foreach { case (nextRange, nextRegion) => - if (remainingSize > 0) { - matchingRegions += nextRegion - remainingSize -= nextRange.size - nextStart = nextRange.end - } - } - } - } - - matchingRegions.toSet.map(returnRegion) - } - - def getRegionsWithSize(size: BigInt, function: String, negateCondition: Boolean = false): Set[MemoryRegion] = { - val matchingRegions = scala.collection.mutable.Set[MemoryRegion]() - - pushContext(function) - stackMap.foreach { - case (range, region) => - if (negateCondition) { - if (range.size != size) { - matchingRegions += region - } - } else if (range.size == size) { - matchingRegions += region - } - } - popContext() - - heapMap.foreach { case (range, region) => - if (negateCondition) { - if (range.size != size) { - matchingRegions += region - } - } else if (range.size == size) { - matchingRegions += region - } - } - - dataMap.foreach { case (range, region) => - if (negateCondition) { - if (range.size != size) { - matchingRegions += region - } - } else if (range.size == size) { - matchingRegions += region - } - } - - matchingRegions.toSet.map(returnRegion) - } - - def getAllocsPerProcedure: Map[String, Set[StackRegion]] = { - localStacks.map((name, stackRegions) => (name, stackRegions.toSet.map(returnRegion))).toMap - } - - def getAllStackRegions: Set[StackRegion] = { - localStacks.values.toSet.flatten.map(returnRegion) - } - - def getAllDataRegions: Set[DataRegion] = { - dataMap.values.toSet.map(returnRegion) - } - - def getAllHeapRegions: Set[HeapRegion] = { - heapMap.values.toSet.map(returnRegion) - } - - def getAllRegions: Set[MemoryRegion] = { - getAllStackRegions ++ getAllDataRegions ++ getAllHeapRegions - } - - def getEnd(memoryRegion: MemoryRegion): BigInt = { // TODO: This would return a list of ends - val range = memoryRegion match { - case stackRegion: StackRegion => - stackMap.find((_, obj) => obj == stackRegion).map((range, _) => range).getOrElse(RangeKey(0, 0)) - case heapRegion: HeapRegion => - heapMap.find((_, obj) => obj == heapRegion).map((range, _) => range).getOrElse(RangeKey(0, 0)) - case dataRegion: DataRegion => - dataMap.find((_, obj) => obj == dataRegion).map((range, _) => 
range).getOrElse(RangeKey(0, 0)) - } - range.end - } - - /* All regions that start at value and are exactly of length size */ - def findStackFullAccessesOnly(value: BigInt, size: BigInt): Option[StackRegion] = { - stackMap.find((range, _) => range.start == value && range.size == size).map((range, obj) => returnRegion(obj)) - } - def findStackObject(value: BigInt): Option[StackRegion] = stackMap.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => returnRegion(obj)) - def isStackBase(value: BigInt): Option[StackRegion] = { - val found = stackMap.find((range, _) => range.start == value) - if (found.isDefined) then Some(returnRegion(found.get._2)) else None - } - - def isDataBase(value: BigInt): Option[DataRegion] = { - val found = dataMap.find((range, _) => range.start == value) - if (found.isDefined) then Some(returnRegion(found.get._2)) else None - } - def findSharedStackObject(value: BigInt): Set[StackRegion] = sharedStackMap.values.flatMap(_.find((range, _) => range.start <= value && value <= range.end).map((range, obj) => returnRegion(obj))).toSet @@ -556,6 +370,15 @@ class MemoryModelMap(val globalOffsets: Map[BigInt, BigInt]) { def getData(cfgPosition: CFGPosition): Set[DataRegion] = { cfgPositionToDataRegion.getOrElse(cfgPosition, Set.empty).map(returnRegion) } + + def nodeToRegion(n: CFGPosition): Set[MemoryRegion] = { + n match { + case directCall: DirectCall => + Set(getHeap(directCall)) + case _ => + getStack(n) ++ getData(n) + } + } } trait MemoryRegion { @@ -576,7 +399,6 @@ case class DataRegion(override val regionIdentifier: String, override val start: override def toString: String = s"Data($regionIdentifier, $start, $size, ($relfContent))" def end: BigInt = start + size - 1 val relfContent: mutable.Set[String] = mutable.Set[String]() - val isPointerTo: Option[DataRegion] = None } class UnionFind { @@ -620,18 +442,4 @@ class UnionFind { } } - def bulkUnion(regions: Set[MemoryRegion]): Unit = { - val roots = regions.map(find) - val root = roots.head - for (region <- roots) { - if (region != root) { - union(root, region) - } - } - } - - // Check if two regions are in the same set - def connected(region1: MemoryRegion, region2: MemoryRegion): Boolean = { - find(region1) == find(region2) - } } \ No newline at end of file diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index e3f65fa30..a37f6c7be 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -19,7 +19,7 @@ trait MemoryRegionAnalysis(val program: Program, val reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], val graResult: Map[CFGPosition, Set[DataRegion]]) { - var mallocCount: BigInt = 0 + private var mallocCount: BigInt = 0 private var stackCount: Int = 0 val stackMap: mutable.Map[Procedure, mutable.Map[BigInt, StackRegion]] = mutable.Map() @@ -65,10 +65,10 @@ trait MemoryRegionAnalysis(val program: Program, if (spList.contains(assign.rhs)) { // add lhs to spList spList.addOne(assign.lhs) - } else { + // TODO: This is a hack: it should check for stack ptr using the wrapper + } else if (spList.contains(assign.lhs) && assign.lhs != stackPointer) { // remove lhs from spList - if spList.contains(assign.lhs) && assign.lhs != stackPointer then // TODO: This is a hack: it should check for stack ptr using the wrapper - spList.remove(spList.indexOf(assign.lhs)) + spList.remove(spList.indexOf(assign.lhs)) } // TODO: should 
handle the store case (last case) case _ => @@ -82,23 +82,13 @@ trait MemoryRegionAnalysis(val program: Program, val first: Set[CFGPosition] = Set.empty + program.mainProcedure private val stackPointer = Register("R31", 64) - private val linkRegister = Register("R30", 64) - private val framePointer = Register("R29", 64) private val mallocVariable = Register("R0", 64) private val spList = ListBuffer[Expr](stackPointer) - private val ignoreRegions: Set[Expr] = Set(linkRegister, framePointer) - // TODO: this could be used instead of regionAccesses in other analyses to reduce the Expr to region conversion - private val registerToRegions: mutable.Map[RegisterVariableWrapper, mutable.Set[MemoryRegion]] = mutable.Map() val procedureToSharedRegions: mutable.Map[Procedure, mutable.Set[MemoryRegion]] = mutable.Map() var procedureToStackRegions: mutable.Map[Procedure, mutable.Set[StackRegion]] = mutable.Map() var procedureToHeapRegions: mutable.Map[DirectCall, HeapRegion] = mutable.Map() - var memLoadToRegion: mutable.Map[MemoryLoad, MemoryRegion] = mutable.Map() var mergeRegions: mutable.Set[Set[MemoryRegion]] = mutable.Set() - def addMergableRegions(regions: Set[MemoryRegion]): Unit = { - mergeRegions.add(regions) - } - def addReturnStack(procedure: Procedure, returnRegion: StackRegion): Unit = { procedureToStackRegions.getOrElseUpdate(procedure, mutable.Set.empty).add(returnRegion) } @@ -107,38 +97,37 @@ trait MemoryRegionAnalysis(val program: Program, procedureToHeapRegions.put(directCall, returnRegion) } - def addMemLoadRegion(memoryLoad: MemoryLoad, memoryRegion: StackRegion): Unit = { - memLoadToRegion.put(memoryLoad, memoryRegion) - } - def reducibleToRegion(binExpr: BinaryExpr, n: Command, subAccess: BigInt): Set[StackRegion] = { - var reducedRegions = Set.empty[StackRegion] - binExpr.arg1 match { + val reducedRegions = binExpr.arg1 match { case variable: Variable if !spList.contains(variable) => - val ctx = getUse(variable, n, reachingDefs) - for (i <- ctx) { - val regions = eval(i.rhs, Set.empty, i, subAccess) - evaluateExpression(binExpr.arg2, constantProp(n)) match { - case Some(b: BitVecLiteral) => - regions.foreach { stackRegion => - val nextOffset = bitVectorOpToBigIntOp(binExpr.op, stackRegion.start, b.value) - reducedRegions = reducedRegions + poolMaster(nextOffset, IRWalk.procedure(n), subAccess) - } - case None => - } + evaluateExpression(binExpr.arg2, constantProp(n)) match { + case Some(b: BitVecLiteral) => + val ctx = getUse(variable, n, reachingDefs) + for { + i <- ctx + stackRegion <- eval(i.rhs, Set.empty, i, subAccess) + } yield { + val nextOffset = bitVectorOpToBigIntOp(binExpr.op, stackRegion.start, b.value) + poolMaster(nextOffset, IRWalk.procedure(n), subAccess) + } + case None => + Set() } case _ => - reducedRegions = reducedRegions ++ eval(binExpr, Set.empty, n, subAccess) + eval(binExpr, Set.empty, n, subAccess) } reducedRegions } def reducibleVariable(variable: Variable, n: Command, subAccess: BigInt): Set[StackRegion] = { - var regions = Set.empty[StackRegion] val ctx = getDefinition(variable, n, reachingDefs) - for (i <- ctx) { - if (i != n) { // TODO: nicer way to deal with loops (a variable is being incremented in a loop) - regions = regions ++ eval(i.rhs, Set.empty, i, subAccess) + + // TODO: nicer way to deal with loops (a variable is being incremented in a loop) + val regions = ctx.flatMap { i => + if (i != n) { + eval(i.rhs, Set.empty, i, subAccess) + } else { + Set() } } regions @@ -146,59 +135,56 @@ trait MemoryRegionAnalysis(val program: Program, def 
eval(exp: Expr, env: Set[StackRegion], n: Command, subAccess: BigInt): Set[StackRegion] = { if (graResult(n).nonEmpty) { - return Set.empty // skip global memory regions - } - exp match { - case binOp: BinaryExpr => - if (spList.contains(binOp.arg1)) { - evaluateExpression(binOp.arg2, constantProp(n)) match { + Set.empty // skip global memory regions + } else { + exp match { + case binOp: BinaryExpr => + if (spList.contains(binOp.arg1)) { + evaluateExpression(binOp.arg2, constantProp(n)) match { + case Some(b: BitVecLiteral) => + val negB = if isNegative(b) then b.value - BigInt(2).pow(b.size) else b.value + Set(poolMaster(negB, IRWalk.procedure(n), subAccess)) + case None => Set.empty + } + } else if (reducibleToRegion(binOp, n, subAccess).nonEmpty) { + reducibleToRegion(binOp, n, subAccess) + } else { + Set.empty + } + case reg: Register if spList.contains(reg) => // TODO: this is a hack because spList is not comprehensive it needs to be a standalone analysis + if (getDefinition(reg, n, reachingDefs).isEmpty) { + Set(poolMaster(Long.MaxValue, IRWalk.procedure(n), subAccess)) + } else { + reducibleVariable(reg, n, subAccess) + } + case variable: Variable => + evaluateExpression(variable, constantProp(n)) match { case Some(b: BitVecLiteral) => - val negB = if isNegative(b) then b.value - BigInt(2).pow(b.size) else b.value - Set(poolMaster(negB, IRWalk.procedure(n), subAccess)) - case None => Set.empty + eval(b, env, n, subAccess) + case _ => + reducibleVariable(variable, n, subAccess) } - } else if (reducibleToRegion(binOp, n, subAccess).nonEmpty) { - reducibleToRegion(binOp, n, subAccess) - } else { + case memoryLoad: MemoryLoad => + eval(memoryLoad.index, env, n, memoryLoad.size) + // ignore case where it could be a global region (loaded later in MMM from relf) + case _: BitVecLiteral => Set.empty - } - case variable: Variable => - variable match { - case reg: Register if spList.contains(reg) => // TODO: this is a hack because spList is not comprehensive it needs to be a standalone analysis - if getDefinition(variable, n, reachingDefs).isEmpty then - Set(poolMaster(Long.MaxValue, IRWalk.procedure(n), subAccess)) - else - reducibleVariable(variable, n, subAccess) - case _ => - evaluateExpression(variable, constantProp(n)) match { - case Some(b: BitVecLiteral) => - eval(b, env, n, subAccess) - case _ => - reducibleVariable(variable, n, subAccess) - } - } - case memoryLoad: MemoryLoad => - eval(memoryLoad.index, env, n, memoryLoad.size) - // ignore case where it could be a global region (loaded later in MMM from relf) - case b: BitVecLiteral => - Set.empty - // we cannot evaluate this to a concrete value, we need VSA for this - case _ => - Logger.debug(s"type: ${exp.getClass} $exp\n") - throw new Exception("Unknown type") + // we cannot evaluate this to a concrete value, we need VSA for this + case _ => + Logger.debug(s"type: ${exp.getClass} $exp\n") + throw new Exception("Unknown type") + } } } /** Transfer function for state lattice elements. 
*/ def localTransfer(n: CFGPosition, s: Set[StackRegion]): Set[StackRegion] = n match { - case cmd: Command => - cmd match { - case directCall: DirectCall => - val ANR = ANRResult(cmd) - val RNA = RNAResult(program.procedures.filter(fn => fn == directCall.target).head) - val parameters = RNA.intersect(ANR) - // TODO: Re-enable when ReachingDef has interprocedural option + case directCall: DirectCall => + // TODO: Re-enable when ReachingDef has interprocedural option + // val ANR = ANRResult(directCall) + // val RNA = RNAResult(program.procedures.filter(fn => fn == directCall.target).head) + // val parameters = RNA.intersect(ANR) // val ctx = regionAccesses(cmd) // for (elem <- parameters) { // if (ctx.contains(RegisterVariableWrapper(elem, getUse(elem, cmd.data, reachingDefs)))) { @@ -212,96 +198,47 @@ trait MemoryRegionAnalysis(val program: Program, // } // } // } - if (directCall.target.name == "malloc") { - evaluateExpression(mallocVariable, constantProp(n)) match { - case Some(b: BitVecLiteral) => - val negB = if isNegative(b) then b.value - BigInt(2).pow(b.size) else b.value - val (name, start) = nextMallocCount(negB) - val newHeapRegion = HeapRegion(name, start, negB, IRWalk.procedure(n)) - addReturnHeap(directCall, newHeapRegion) - s - case None => - // Assume heap region size is at least 1 TODO: must approximate size of heap - val negB = 1 - val (name, start) = nextMallocCount(negB) - val newHeapRegion = HeapRegion(name, start, negB, IRWalk.procedure(n)) - addReturnHeap(directCall, newHeapRegion) - s - } - } else { + if (directCall.target.name == "malloc") { + evaluateExpression(mallocVariable, constantProp(n)) match { + case Some(b: BitVecLiteral) => + val negB = if isNegative(b) then b.value - BigInt(2).pow(b.size) else b.value + val (name, start) = nextMallocCount(negB) + val newHeapRegion = HeapRegion(name, start, negB, IRWalk.procedure(n)) + addReturnHeap(directCall, newHeapRegion) s - } - case memAssign: MemoryAssign => - val result = eval(memAssign.index, s, cmd, memAssign.size) + case None => + // Assume heap region size is at least 1 TODO: must approximate size of heap + val negB = 1 + val (name, start) = nextMallocCount(negB) + val newHeapRegion = HeapRegion(name, start, negB, IRWalk.procedure(n)) + addReturnHeap(directCall, newHeapRegion) + s + } + } else { + s + } + case memAssign: MemoryAssign => + val result = eval(memAssign.index, s, memAssign, memAssign.size) // if (result.size > 1) { // //throw new Exception(s"Memory load resulted in multiple regions ${result} for mem load $memoryLoad") // addMergableRegions(result) // } - result - case assign: Assign => - stackDetection(assign) - var m = Set[StackRegion]() - - val unwrapped = unwrapExpr(assign.rhs) - if (unwrapped.isDefined) - val result = eval(unwrapped.get.index, s, cmd, unwrapped.get.size) -// if (result.size > 1) { -// //throw new Exception(s"Memory load resulted in multiple regions ${result} for mem load $memoryLoad") -// addMergableRegions(result) -// } - m = m ++ result - m - case _ => s + result + case assign: Assign => + stackDetection(assign) + val unwrapped = unwrapExpr(assign.rhs) + if (unwrapped.isDefined) { + eval(unwrapped.get.index, s, assign, unwrapped.get.size) + // if (result.size > 1) { + // //throw new Exception(s"Memory load resulted in multiple regions ${result} for mem load $memoryLoad") + // addMergableRegions(result) + // } + } else { + Set() } - case _ => s // ignore other kinds of nodes + case _ => s } -// def localTransfer2(n: CFGPosition, s: Set[StackRegion]): Set[StackRegion] = n 
match { -// case cmd: Command => -// cmd match { -// case directCall: DirectCall => -// if (directCall.target.name == "malloc") { -// evaluateExpression(mallocVariable, constantProp(n)) match { -// case Some(b: BitVecLiteral) => -// val negB = if isNegative(b) then b.value - BigInt(2).pow(b.size) else b.value -// val newHeapRegion = HeapRegion(nextMallocCount(), negB, IRWalk.procedure(n)) -// addReturnHeap(directCall, newHeapRegion) -// s -// case None => s -// } -// } else { -// s -// } -// case memAssign: MemoryAssign => -// val evaluation = evaluateExpression(memAssign.index, constantProp(n)) -// if (evaluation.isDefined) { -// val isGlobal = mmm.findDataObject(evaluation.get.value) -// if (isGlobal.isEmpty) { -// val result = poolMaster(Long.MaxValue - evaluation.get.value, IRWalk.procedure(n), memAssign.size) -// return Set(result) -// } -// } -// s -// case assign: Assign => -// var m = Set[StackRegion]() -// unwrapExpr(assign.rhs).foreach { -// case memoryLoad: MemoryLoad => -// val evaluation = evaluateExpression(memoryLoad.index, constantProp(n)) -// if (evaluation.isDefined) { -// val isGlobal = mmm.findDataObject(evaluation.get.value) -// if (isGlobal.isEmpty) { -// val result = poolMaster(Long.MaxValue - evaluation.get.value, IRWalk.procedure(n), memoryLoad.size) -// m = m + result -// } -// } -// case _ => m -// } -// m -// case _ => s -// } -// case _ => s // ignore other kinds of nodes -// } - def transfer(n: CFGPosition, s: Set[StackRegion]): Set[StackRegion] = localTransfer(n, s) } diff --git a/src/main/scala/analysis/ReachingDefs.scala b/src/main/scala/analysis/ReachingDefs.scala index 6d6368c39..5a2bb2f2a 100644 --- a/src/main/scala/analysis/ReachingDefs.scala +++ b/src/main/scala/analysis/ReachingDefs.scala @@ -7,24 +7,26 @@ abstract class ReachingDefs(program: Program, writesTo: Map[Procedure, Set[Regis val mallocRegister = Register("R0", 64) val domain: Set[CFGPosition] = computeDomain(IntraProcIRCursor, program.procedures).toSet - val lattice: MapLattice[CFGPosition, Map[Variable, Set[CFGPosition]], MapLattice[Variable, Set[CFGPosition], PowersetLattice[CFGPosition]]] = new MapLattice(new MapLattice(new PowersetLattice[CFGPosition]())) + val lattice: MapLattice[CFGPosition, Map[Variable, Set[CFGPosition]], MapLattice[Variable, Set[CFGPosition], PowersetLattice[CFGPosition]]] = MapLattice(MapLattice(PowersetLattice[CFGPosition]())) - def transfer(n: CFGPosition, s: Map[Variable, Set[CFGPosition]]): Map[Variable, Set[CFGPosition]] = - n match - case loc:Assign => + def transfer(n: CFGPosition, s: Map[Variable, Set[CFGPosition]]): Map[Variable, Set[CFGPosition]] = { + n match { + case loc: Assign => s + (loc.lhs -> Set(n)) case DirectCall(target, _) if target.name == "malloc" => s + (mallocRegister -> Set(n)) case DirectCall(target, _) if writesTo.contains(target) => - val result: Map[Variable, Set[CFGPosition]] = writesTo(target).foldLeft(Map[Variable, Set[CFGPosition]]()){ + val result: Map[Variable, Set[CFGPosition]] = writesTo(target).foldLeft(Map[Variable, Set[CFGPosition]]()) { (m, register) => m + (register -> Set(n)) } s ++ result case _ => s + } + } } -class ReachingDefsAnalysis(program: Program, writesTo: Map[Procedure, Set[Register]]) extends ReachingDefs(program, writesTo), IRIntraproceduralForwardDependencies, +class ReachingDefsAnalysis(program: Program, writesTo: Map[Procedure, Set[Register]]) extends ReachingDefs(program, writesTo), IRIntraproceduralForwardDependencies, SimplePushDownWorklistFixpointSolver[CFGPosition, Map[Variable, Set[CFGPosition]], 
MapLattice[Variable, Set[CFGPosition], PowersetLattice[CFGPosition]]] diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala index 92ded6b71..3cc414f06 100644 --- a/src/main/scala/analysis/RegionInjector.scala +++ b/src/main/scala/analysis/RegionInjector.scala @@ -14,11 +14,9 @@ import scala.collection.mutable.ArrayBuffer class MergedRegion(var name: String, val subregions: mutable.Set[MemoryRegion]) class RegionInjector(program: Program, mmm: MemoryModelMap) { - private val stackPointer = Register("R31", 64) - - val accessToRegion = mutable.Map[Statement, Set[MemoryRegion]]() - val loadToMemory = mutable.Map[Statement, Memory]() - val mergedRegions = mutable.Map[MemoryRegion, MergedRegion]() + private val accessToRegion = mutable.Map[Statement, Set[MemoryRegion]]() + private val loadToMemory = mutable.Map[Statement, Memory]() + val mergedRegions: mutable.Map[MemoryRegion, MergedRegion] = mutable.Map() def nodeVisitor(): Unit = { // visit reachable procedures @@ -113,7 +111,7 @@ class RegionInjector(program: Program, mmm: MemoryModelMap) { } } - def statementToRegions(n: Statement): Set[MemoryRegion] = { + private def statementToRegions(n: Statement): Set[MemoryRegion] = { mmm.getStack(n) ++ mmm.getData(n) } diff --git a/src/main/scala/analysis/UtilMethods.scala b/src/main/scala/analysis/UtilMethods.scala index 43ebc6f3a..2f74b65e8 100644 --- a/src/main/scala/analysis/UtilMethods.scala +++ b/src/main/scala/analysis/UtilMethods.scala @@ -153,25 +153,6 @@ def getUse(variable: Variable, node: CFGPosition, reachingDefs: Map[CFGPosition, out.getOrElse(variable, Set()) } -/** - * In expressions that have accesses within a region, we need to relocate - * the base address to the actual address using the relocation table. 
- * MUST RELOCATE because MMM iterate to find the lowest address - * TODO: May need to iterate over the relocation table to find the actual address - * - * @param address - * @param globalOffsets - * @return BitVecLiteral: the relocated address - */ -def relocatedBase(address: BitVecLiteral, globalOffsets: Map[BigInt, BigInt]): BitVecLiteral = { - val tableAddress = globalOffsets.getOrElse(address.value, address.value) - // this condition checks if the address is not layered and returns if it is not - if (tableAddress != address.value && !globalOffsets.contains(tableAddress)) { - return address - } - BitVecLiteral(tableAddress, address.size) -} - def unwrapExpr(expr: Expr): Option[MemoryLoad] = { expr match { case e: Extract => unwrapExpr(e.body) diff --git a/src/main/scala/analysis/VSA.scala b/src/main/scala/analysis/VSA.scala index 7c03e27f6..37d503238 100644 --- a/src/main/scala/analysis/VSA.scala +++ b/src/main/scala/analysis/VSA.scala @@ -21,11 +21,9 @@ case class LiteralValue(expr: BitVecLiteral) extends Value { override def toString: String = "Literal(" + expr + ")" } -trait ValueSetAnalysis(domain: Set[CFGPosition], - program: Program, - mmm: MemoryModelMap, - constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]) { +trait ValueSetAnalysis(program: Program, + mmm: MemoryModelMap, + constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]]) { val powersetLattice: PowersetLattice[Value] = PowersetLattice() @@ -35,21 +33,10 @@ trait ValueSetAnalysis(domain: Set[CFGPosition], val lattice: MapLattice[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]], LiftLattice[Map[Variable | MemoryRegion, Set[Value]], mapLattice.type]] = MapLattice(liftedLattice) - val first: Set[CFGPosition] = Set.empty + program.mainProcedure + val first: Set[CFGPosition] = Set(program.mainProcedure) private val mallocVariable = Register("R0", 64) - def nodeToRegion(n: CFGPosition): Set[MemoryRegion] = { - var returnRegions = Set.empty[MemoryRegion] - n match { - case directCall: DirectCall => - returnRegions = returnRegions + mmm.getHeap(directCall).asInstanceOf[MemoryRegion] - case _ => - returnRegions = returnRegions ++ mmm.getStack(n).asInstanceOf[Set[MemoryRegion]] ++ mmm.getData(n).asInstanceOf[Set[MemoryRegion]] - } - returnRegions - } - def canCoerceIntoDataRegion(bitVecLiteral: BitVecLiteral, size: Int): Option[DataRegion] = { mmm.findDataObject(bitVecLiteral.value) } @@ -57,77 +44,75 @@ trait ValueSetAnalysis(domain: Set[CFGPosition], /** Default implementation of eval. 
*/ def eval(cmd: Command, s: Map[Variable | MemoryRegion, Set[Value]], n: CFGPosition): Map[Variable | MemoryRegion, Set[Value]] = { - var m = s cmd match case directCall: DirectCall if directCall.target.name == "malloc" => - val regions = nodeToRegion(n) + val regions = mmm.nodeToRegion(n) // malloc variable - m = m + (mallocVariable -> regions.map(r => AddressValue(r))) - m + s + (mallocVariable -> regions.map(r => AddressValue(r))) case localAssign: Assign => - val regions = nodeToRegion(n) + val regions = mmm.nodeToRegion(n) if (regions.nonEmpty) { - m = m + (localAssign.lhs -> regions.map(r => AddressValue(r))) + s + (localAssign.lhs -> regions.map(r => AddressValue(r))) } else { - evaluateExpression(localAssign.rhs, constantProp(n)) match + evaluateExpression(localAssign.rhs, constantProp(n)) match { case Some(bitVecLiteral: BitVecLiteral) => val possibleData = canCoerceIntoDataRegion(bitVecLiteral, 1) - if (possibleData.isDefined) { - m = m + (localAssign.lhs -> Set(AddressValue(possibleData.get))) - } else { - m = m + (localAssign.lhs -> Set(LiteralValue(bitVecLiteral))) - } + if (possibleData.isDefined) { + s + (localAssign.lhs -> Set(AddressValue(possibleData.get))) + } else { + s + (localAssign.lhs -> Set(LiteralValue(bitVecLiteral))) + } case None => val unwrapValue = unwrapExprToVar(localAssign.rhs) unwrapValue match { case Some(v: Variable) => - m = m + (localAssign.lhs -> m(v)) + s + (localAssign.lhs -> s(v)) case None => Logger.debug(s"Too Complex: ${localAssign.rhs}") // do nothing + s } + } } - m case memAssign: MemoryAssign => - val regions = nodeToRegion(n) - evaluateExpression(memAssign.value, constantProp(n)) match + val regions = mmm.nodeToRegion(n) + evaluateExpression(memAssign.value, constantProp(n)) match { case Some(bitVecLiteral: BitVecLiteral) => - regions.foreach { r => - val possibleData = canCoerceIntoDataRegion(bitVecLiteral, memAssign.size) - if (possibleData.isDefined) { - m = m + (r -> Set(AddressValue(possibleData.get))) - } else { - m = m + (r -> Set(LiteralValue(bitVecLiteral))) - } + val possibleData = canCoerceIntoDataRegion(bitVecLiteral, memAssign.size) + if (possibleData.isDefined) { + s ++ regions.map(r => r -> Set(AddressValue(possibleData.get))) + } else { + s ++ regions.map(r => r -> Set(LiteralValue(bitVecLiteral))) } case None => val unwrapValue = unwrapExprToVar(memAssign.value) unwrapValue match { case Some(v: Variable) => - regions.foreach { r => - m = m + (r -> m(v)) - } + s ++ regions.map(r => r -> s(v)) case None => Logger.debug(s"Too Complex: $memAssign.value") // do nothing + s } - m + } case _ => - m + s } /** Transfer function for state lattice elements. */ - def localTransfer(n: CFGPosition, s: Map[Variable | MemoryRegion, Set[Value]]): Map[Variable | MemoryRegion, Set[Value]] = - if (IRWalk.procedure(n) == n) { - mmm.pushContext(n.asInstanceOf[Procedure].name) - s - } else if (IRWalk.lastInProc(IRWalk.procedure(n)) == n) { - mmm.popContext() - s - } else n match + def localTransfer(n: CFGPosition, s: Map[Variable | MemoryRegion, Set[Value]]): Map[Variable | MemoryRegion, Set[Value]] = { + n match { + case p: Procedure => + mmm.pushContext(p.name) + s + case _: Return => + mmm.popContext() + s case command: Command => eval(command, s, n) case _ => s + } + } /** Transfer function for state lattice elements. (Same as `localTransfer` for simple value analysis.) 
*/ @@ -135,12 +120,10 @@ trait ValueSetAnalysis(domain: Set[CFGPosition], } class ValueSetAnalysisSolver( - domain: Set[CFGPosition], program: Program, mmm: MemoryModelMap, constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])] -) extends ValueSetAnalysis(domain, program, mmm, constantProp, reachingDefs) +) extends ValueSetAnalysis(program, mmm, constantProp) with IRIntraproceduralForwardDependencies with Analysis[Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]] with WorklistFixpointSolverWithReachability[CFGPosition, Map[Variable | MemoryRegion, Set[Value]], MapLattice[Variable | MemoryRegion, Set[Value], PowersetLattice[Value]]] { diff --git a/src/main/scala/ir/Program.scala b/src/main/scala/ir/Program.scala index be22c5408..5ff441799 100644 --- a/src/main/scala/ir/Program.scala +++ b/src/main/scala/ir/Program.scala @@ -473,9 +473,9 @@ case class MemorySection(name: String, address: BigInt, size: Int, bytes: Seq[Bi for (i <- 0 until num) yield { val index = startIndex + i if (index >= bytes.size || index < 0) { - throw Exception("www" + num) + throw Exception(s"can't get $num bytes from section $name with size $size starting at index $startIndex (access address $addr)") } - bytes(startIndex + i) + bytes(index) } } diff --git a/src/main/scala/ir/transforms/IndirectCallResolution.scala b/src/main/scala/ir/transforms/IndirectCallResolution.scala index c4c9d894b..1aee9a483 100644 --- a/src/main/scala/ir/transforms/IndirectCallResolution.scala +++ b/src/main/scala/ir/transforms/IndirectCallResolution.scala @@ -1,245 +1,139 @@ package ir.transforms -import scala.collection.mutable.ListBuffer -import scala.collection.mutable.ArrayBuffer -import analysis.solvers.* -import analysis.* -import bap.* +import analysis.{AddressValue, DataRegion, Lift, LiftedElement, LiteralValue, MemoryRegion, RegisterWrapperEqualSets, StackRegion, Value, getUse} import ir.* -import translating.* import util.Logger -import util.intrusive_list.IntrusiveList -import scala.collection.mutable -import cilvisitor._ -def resolveIndirectCallsUsingPointsTo( - pointsTos: Map[RegisterWrapperEqualSets | MemoryRegion, Set[RegisterWrapperEqualSets | MemoryRegion]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], - IRProgram: Program -): Boolean = { - var modified: Boolean = false - val worklist = ListBuffer[CFGPosition]() +import scala.collection.mutable +import scala.collection.mutable.{ArrayBuffer, ListBuffer} - worklist.addAll(IRProgram) - val visited = mutable.Set[CFGPosition]() - while (worklist.nonEmpty) { - val node = worklist.remove(0) - if (!visited.contains(node)) { - // add to worklist before we delete the node and can no longer find its successors - InterProcIRCursor.succ(node).foreach(node => worklist.addOne(node)) - process(node) - visited.add(node) - } - } +class SteensgaardIndirectCallResolution( + override val program: Program, + val pointsTos: Map[RegisterWrapperEqualSets | MemoryRegion, Set[RegisterWrapperEqualSets | MemoryRegion]], + val reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])] +) extends IndirectCallResolution { - def searchRegion(region: MemoryRegion): mutable.Set[String] = { - val result = mutable.Set[String]() + private def searchRegion(region: MemoryRegion): Set[String] = { region match { case stackRegion: StackRegion => if (pointsTos.contains(stackRegion)) { - for (c <- 
pointsTos(stackRegion)) { - c match { - case registerWrapperEqualSets: RegisterWrapperEqualSets => - pointsTos(registerWrapperEqualSets).foreach { - case memoryRegion: MemoryRegion => - result.addAll(searchRegion(memoryRegion)) - case registerWrapperEqualSets: RegisterWrapperEqualSets => throw Exception(s"possibly recursive points-to relation? should I handle this? $registerWrapperEqualSets") - } - case memoryRegion: MemoryRegion => - //result.addAll(searchRegion(memoryRegion)) - result.add(memoryRegion.regionIdentifier) // TODO: fix me - } + pointsTos(stackRegion).flatMap { + case registerWrapperEqualSets: RegisterWrapperEqualSets => + pointsTos(registerWrapperEqualSets).flatMap { + case memoryRegion: MemoryRegion => + searchRegion(memoryRegion) + case registerWrapperEqualSets: RegisterWrapperEqualSets => + throw Exception(s"possibly recursive points-to relation? should I handle this? $registerWrapperEqualSets") + } + case memoryRegion: MemoryRegion => + //searchRegion(memoryRegion) + Set(memoryRegion.regionIdentifier) // TODO: fix me } + } else { + Set() } - result case dataRegion: DataRegion => if (!pointsTos.contains(dataRegion) || pointsTos(dataRegion).isEmpty) { - result.add(dataRegion.regionIdentifier) + Set(dataRegion.regionIdentifier) } else { - result.add(dataRegion.regionIdentifier) // TODO: may need to investigate if we should add the parent region - for (c <- pointsTos(dataRegion)) { - c match { - case registerWrapperEqualSets: RegisterWrapperEqualSets => - pointsTos(registerWrapperEqualSets).foreach { - case memoryRegion: MemoryRegion => - result.addAll(searchRegion(memoryRegion)) - case registerWrapperEqualSets: RegisterWrapperEqualSets => throw Exception(s"possibly recursive points-to relation? should I handle this? $registerWrapperEqualSets") - } - case memoryRegion: MemoryRegion => - //result.addAll(searchRegion(memoryRegion)) - result.add(memoryRegion.regionIdentifier) // TODO: fix me - } + val names: Set[String] = pointsTos(dataRegion).flatMap { + case registerWrapperEqualSets: RegisterWrapperEqualSets => + pointsTos(registerWrapperEqualSets).flatMap { + case memoryRegion: MemoryRegion => + searchRegion(memoryRegion) + case registerWrapperEqualSets: RegisterWrapperEqualSets => + throw Exception(s"possibly recursive points-to relation? should I handle this? 
$registerWrapperEqualSets") + } + case memoryRegion: MemoryRegion => + //searchRegion(memoryRegion)) + Set(memoryRegion.regionIdentifier) // TODO: fix me } + names + dataRegion.regionIdentifier // TODO: may need to investigate if we should add the parent region } - result } } - def addFakeProcedure(name: String): Procedure = { - val newProcedure = Procedure(name) - IRProgram.procedures += newProcedure - newProcedure - } - - def resolveAddresses(variable: Variable, i: IndirectCall): mutable.Set[String] = { - val names = mutable.Set[String]() + override def resolveAddresses(variable: Variable, i: IndirectCall): Set[String] = { val variableWrapper = RegisterWrapperEqualSets(variable, getUse(variable, i, reachingDefs)) pointsTos.get(variableWrapper) match { - case Some(value) => - value.map { - case v: RegisterWrapperEqualSets => names.addAll(resolveAddresses(v.variable, i)) - case m: MemoryRegion => names.addAll(searchRegion(m)) + case Some(values) => + values.flatMap { + case v: RegisterWrapperEqualSets => resolveAddresses(v.variable, i) + case m: MemoryRegion => searchRegion(m) } - names - case None => names + case None => Set() } } - def process(n: CFGPosition): Unit = n match { - case indirectCall: IndirectCall if indirectCall.target != Register("R30", 64) => - if (!indirectCall.hasParent) { - // skip if we have already processesd this call - return - } - // we need the single-call-at-end-of-block invariant - assert(indirectCall.parent.statements.lastOption.contains(indirectCall)) - - val block = indirectCall.parent - val procedure = block.parent - - val targetNames = resolveAddresses(indirectCall.target, indirectCall) - Logger.debug(s"Points-To approximated call ${indirectCall.target} with $targetNames") - Logger.debug(IRProgram.procedures) - val targets: mutable.Set[Procedure] = - targetNames.map(name => IRProgram.procedures.find(_.name == name).getOrElse(addFakeProcedure(name))) - - if (targets.nonEmpty) { - Logger.debug(s"Resolved indirect call $indirectCall") - } - - if (targets.size == 1) { - modified = true - - val newCall = DirectCall(targets.head, indirectCall.label) - block.statements.replace(indirectCall, newCall) - } else if (targets.size > 1) { - - val oft = indirectCall.parent.jump - - modified = true - val newBlocks = ArrayBuffer[Block]() - for (t <- targets) { - Logger.debug(targets) - val address = t.address.match { - case Some(a) => a - case None => - throw Exception(s"resolved indirect call $indirectCall to procedure which does not have address: $t") - } - val assume = Assume(BinaryExpr(BVEQ, indirectCall.target, BitVecLiteral(address, 64))) - val newLabel: String = block.label + t.name - val directCall = DirectCall(t) +} - /* copy the goto node resulting */ - val fallthrough = oft match { - case g: GoTo => GoTo(g.targets, g.label) - case h: Unreachable => Unreachable() - case r: Return => Return() +class VSAIndirectCallResolution( + override val program: Program, + val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] +) extends IndirectCallResolution { + + private def searchRegion(memoryRegion: MemoryRegion, n: CFGPosition): Set[String] = { + val names = vsaResult.get(n) match { + case Some(Lift(el)) => el.get(memoryRegion) match { + case Some(values) => + values.flatMap { + case addressValue: AddressValue => searchRegion(addressValue.region, n) + case _ => Set() } - newBlocks.append(Block(newLabel, None, ArrayBuffer(assume, directCall), fallthrough)) - } - block.statements.remove(indirectCall) - procedure.addBlocks(newBlocks) - val 
newCall = GoTo(newBlocks, indirectCall.label) - block.replaceJump(newCall) + case _ => Set() } - case _ => - } - - modified -} - - -def resolveIndirectCallsUsingVSA( - vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]], - IRProgram: Program - ): Boolean = { - var modified: Boolean = false - val worklist = ListBuffer[CFGPosition]() - - worklist.addAll(IRProgram) - - val visited = mutable.Set[CFGPosition]() - while (worklist.nonEmpty) { - val node = worklist.remove(0) - if (!visited.contains(node)) { - // add to worklist before we delete the node and can no longer find its successors - InterProcIRCursor.succ(node).foreach(node => worklist.addOne(node)) - process(node) - visited.add(node) + case _ => Set() } - } - - def addFakeProcedure(name: String): Procedure = { - val newProcedure = Procedure(name) - IRProgram.procedures += newProcedure - newProcedure - } - - def searchRegion(memoryRegion: MemoryRegion, n: CFGPosition): mutable.Set[String] = { - val names = mutable.Set[String]() memoryRegion match { - case stackRegion: StackRegion => - vsaResult.get(n) match - case Some(value) => value match - case Lift(el) => el.get(stackRegion) match - case Some(value) => value.map { - case addressValue: AddressValue => names.addAll(searchRegion(addressValue.region, n)) - case literalValue: LiteralValue => - } - case None => - case LiftedBottom => - case _ => - case None => + case _: StackRegion => + names case dataRegion: DataRegion => - names.addAll(dataRegion.relfContent) - vsaResult.get(n) match - case Some(value) => value match - case Lift(el) => el.get(dataRegion) match - case Some(value) => value.map { - case addressValue: AddressValue => names.addAll(searchRegion(addressValue.region, n)) - case literalValue: LiteralValue => - } - case None => - case LiftedBottom => - case _ => - case None => + names ++ dataRegion.relfContent + case _ => + Set() } - names } - def resolveAddresses(variable: Variable, i: IndirectCall): mutable.Set[String] = { - val names = mutable.Set[String]() - vsaResult.get(i) match - case Some(value) => value match - case Lift(el) => el.get(variable) match - case Some(value) => value.map { - case addressValue: AddressValue => names.addAll(searchRegion(addressValue.region, i)) - case literalValue: LiteralValue => - } - case None => - case LiftedBottom => - case _ => - case None => - names + override def resolveAddresses(variable: Variable, i: IndirectCall): Set[String] = { + vsaResult.get(i) match { + case Some(Lift(el)) => el.get(variable) match { + case Some(values) => + values.flatMap { + case addressValue: AddressValue => searchRegion(addressValue.region, i) + case _: LiteralValue => Set() + } + case _ => Set() + } + case _ => Set() + } } +} - def process(n: CFGPosition): Unit = n match { - case indirectCall: IndirectCall if indirectCall.target != Register("R30", 64) => - if (!indirectCall.hasParent) { - // skip if we have already processesd this call - return +trait IndirectCallResolution { + val program: Program + + def resolveIndirectCalls(): Boolean = { + var modified = false + val worklist = ListBuffer[CFGPosition]() + worklist.addAll(program) + + val visited = mutable.Set[CFGPosition]() + while (worklist.nonEmpty) { + val node = worklist.remove(0) + if (!visited.contains(node)) { + // add to worklist before we delete the node and can no longer find its successors + InterProcIRCursor.succ(node).foreach(node => worklist.addOne(node)) + modified = process(node) || modified + visited.add(node) } + } + modified + } + + // returns whether or 
not the program was modified + def process(n: CFGPosition): Boolean = n match { + case indirectCall: IndirectCall if indirectCall.target != Register("R30", 64) && indirectCall.hasParent => // we need the single-call-at-end-of-block invariant assert(indirectCall.parent.statements.lastOption.contains(indirectCall)) @@ -247,25 +141,22 @@ def resolveIndirectCallsUsingVSA( val procedure = block.parent val targetNames = resolveAddresses(indirectCall.target, indirectCall) - Logger.debug(s"VSA approximated call ${indirectCall.target} with $targetNames") - Logger.debug(IRProgram.procedures) - val targets: mutable.Set[Procedure] = - targetNames.map(name => IRProgram.procedures.find(_.name == name).getOrElse(addFakeProcedure(name))) + Logger.debug(s"approximated call ${indirectCall.target} with $targetNames") + Logger.debug(program.procedures) + val targets: Set[Procedure] = + targetNames.map(name => program.procedures.find(_.name == name).getOrElse(addFakeProcedure(name))) if (targets.nonEmpty) { Logger.debug(s"Resolved indirect call $indirectCall") } if (targets.size == 1) { - modified = true - val newCall = DirectCall(targets.head, indirectCall.label) block.statements.replace(indirectCall, newCall) + true } else if (targets.size > 1) { val oft = indirectCall.parent.jump - - modified = true val newBlocks = ArrayBuffer[Block]() for (t <- targets) { Logger.debug(targets) @@ -280,9 +171,9 @@ def resolveIndirectCallsUsingVSA( /* copy the goto node resulting */ val fallthrough = oft match { - case g: GoTo => GoTo(g.targets, g.label) - case h: Unreachable => Unreachable() - case r: Return => Return() + case g: GoTo => GoTo(g.targets, g.label) + case _: Unreachable => Unreachable() + case _: Return => Return() } newBlocks.append(Block(newLabel, None, ArrayBuffer(assume, directCall), fallthrough)) } @@ -290,9 +181,20 @@ def resolveIndirectCallsUsingVSA( procedure.addBlocks(newBlocks) val newCall = GoTo(newBlocks, indirectCall.label) block.replaceJump(newCall) + true + } else { + false } case _ => + false } - modified + def addFakeProcedure(name: String): Procedure = { + val newProcedure = Procedure(name) + program.procedures += newProcedure + newProcedure + } + + def resolveAddresses(variable: Variable, i: IndirectCall): Set[String] + } \ No newline at end of file diff --git a/src/main/scala/ir/transforms/SplitThreads.scala b/src/main/scala/ir/transforms/SplitThreads.scala index ef9e0b853..8123c0ae1 100644 --- a/src/main/scala/ir/transforms/SplitThreads.scala +++ b/src/main/scala/ir/transforms/SplitThreads.scala @@ -1,65 +1,46 @@ package ir.transforms -import scala.collection.mutable.ListBuffer -import scala.collection.mutable.ArrayBuffer -import analysis.solvers.* -import analysis.* -import bap.* +import analysis.{DataRegion, MemoryRegion, RegisterWrapperEqualSets, getDefinition} import ir.* -import translating.* -import util.Logger -import java.util.Base64 -import spray.json.DefaultJsonProtocol.* -import util.intrusive_list.IntrusiveList + import scala.collection.mutable -import cilvisitor._ -// identify calls to pthread_create -// use analysis result to determine the third parameter's value (the function pointer) -// split off that procedure into new thread -// do reachability analysis -// also need a bit in the IR where it creates separate files + def splitThreads(program: Program, pointsTo: Map[RegisterWrapperEqualSets, Set[RegisterWrapperEqualSets | MemoryRegion]], - regionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], 
Map[Variable, Set[Assign]])] ): Unit = { - // iterate over all commands - if call is to pthread_create, look up? - program.foreach(c => - c match { - case d: DirectCall if d.target.name == "pthread_create" => - - // R2 should hold the function pointer of the function that begins the thread - // look up R2 value using points to results - val R2 = Register("R2", 64) - val b = reachingDefs(d) - val R2Wrapper = RegisterWrapperEqualSets(R2, getDefinition(R2, d, reachingDefs)) - val threadTargets = pointsTo(R2Wrapper) - - if (threadTargets.size > 1) { - // currently can't handle case where the thread created is ambiguous - throw Exception("can't handle thread creation with more than one possible target") - } + // iterate over all commands - if call is to pthread_create, look up + program.foreach { + case d: DirectCall if d.target.name == "pthread_create" => + // R2 should hold the function pointer of the function that begins the thread + // look up R2 value using points to results + val R2 = Register("R2", 64) + val R2Wrapper = RegisterWrapperEqualSets(R2, getDefinition(R2, d, reachingDefs)) + val threadTargets = pointsTo(R2Wrapper) - if (threadTargets.size == 1) { + if (threadTargets.size > 1) { + // currently can't handle case where the thread created is ambiguous + throw Exception("can't handle thread creation with more than one possible target") + } - // not trying to untangle the very messy region resolution at present, just dealing with simplest case - threadTargets.head match { - case data: DataRegion => - val threadEntrance = program.procedures.find(_.name == data.regionIdentifier) match { - case Some(proc) => proc - case None => throw Exception("could not find procedure with name " + data.regionIdentifier) - } - val thread = ProgramThread(threadEntrance, mutable.LinkedHashSet(threadEntrance), Some(d)) - program.threads.addOne(thread) - case _ => - throw Exception("unexpected non-data region " + threadTargets.head + " as PointsTo result for R2 at " + d) - } + if (threadTargets.size == 1) { + // not trying to untangle the very messy region resolution at present, just dealing with simplest case + threadTargets.head match { + case data: DataRegion => + val threadEntrance = program.procedures.find(_.name == data.regionIdentifier) match { + case Some(proc) => proc + case None => throw Exception("could not find procedure with name " + data.regionIdentifier) + } + val thread = ProgramThread(threadEntrance, mutable.LinkedHashSet(threadEntrance), Some(d)) + program.threads.addOne(thread) + case _ => + throw Exception("unexpected non-data region " + threadTargets.head + " as PointsTo result for R2 at " + d) } - case _ => - }) - + } + case _ => + } if (program.threads.nonEmpty) { val mainThread = ProgramThread(program.mainProcedure, mutable.LinkedHashSet(program.mainProcedure), None) diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index e98b6ccda..49c3e4cdd 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -60,7 +60,6 @@ case class StaticAnalysisContext( paramResults: Map[Procedure, Set[Variable]], steensgaardResults: Map[RegisterWrapperEqualSets, Set[RegisterWrapperEqualSets | MemoryRegion]], mmmResults: MemoryModelMap, - memoryRegionContents: Map[MemoryRegion, Set[BitVecLiteral | MemoryRegion]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], varDepsSummaries: Map[Procedure, Map[Taintable, Set[Taintable]]], regionInjector: RegionInjector, @@ -381,9 +380,10 @@ object 
StaticAnalysis { val mmm = MemoryModelMap(globalOffsets) mmm.preLoadGlobals(mergedSubroutines, globalAddresses, globalSizes) - var previousVSAResults = Option.empty[Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]] - if (previousResults.isDefined) { - previousVSAResults = Some(previousResults.get.vsaResult) + val previousVSAResults = if (previousResults.isDefined) { + previousResults.get.vsaResult + } else { + Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]() } Logger.debug("[!] Running GRA") @@ -418,18 +418,16 @@ object StaticAnalysis { }) Logger.debug("[!] Running MMM") - mmm.convertMemoryRegions(mraSolver.procedureToStackRegions, mraSolver.procedureToHeapRegions, mraSolver.mergeRegions, mraResult, mraSolver.procedureToSharedRegions, graSolver.getDataMap, graResult) + mmm.convertMemoryRegions(mraSolver.procedureToStackRegions, mraSolver.procedureToHeapRegions, mraResult, mraSolver.procedureToSharedRegions, graSolver.getDataMap, graResult) mmm.logRegions() Logger.debug("[!] Running Steensgaard") - val steensgaardSolver = InterprocSteensgaardAnalysis(interDomain.toSet, constPropResult, mmm, reachingDefinitionsAnalysisResults, globalOffsets, previousVSAResults) + val steensgaardSolver = InterprocSteensgaardAnalysis(interDomain.toSet, mmm, reachingDefinitionsAnalysisResults, previousVSAResults) steensgaardSolver.analyze() val steensgaardResults = steensgaardSolver.pointsTo() - val memoryRegionContents = steensgaardSolver.getMemoryRegionContents - mmm.logRegions(memoryRegionContents) Logger.debug("[!] Running VSA") - val vsaSolver = ValueSetAnalysisSolver(domain.toSet, IRProgram, mmm, constPropResult, reachingDefinitionsAnalysisResults) + val vsaSolver = ValueSetAnalysisSolver(IRProgram, mmm, constPropResult) val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = vsaSolver.analyze() config.analysisDotPath.foreach(s => { @@ -455,7 +453,6 @@ object StaticAnalysis { paramResults = paramResults, steensgaardResults = steensgaardResults, mmmResults = mmm, - memoryRegionContents = memoryRegionContents, symbolicAddresses = Map.empty, reachingDefs = reachingDefinitionsAnalysisResults, varDepsSummaries = varDepsSummaries, @@ -576,15 +573,19 @@ object RunUtils { val result = StaticAnalysis.analyse(ctx, config, iteration, analysisResult.lastOption) analysisResult.append(result) Logger.debug("[!] Replacing Indirect Calls") -// modified = transforms.resolveIndirectCallsUsingPointsTo( -// result.steensgaardResults, -// result.reachingDefs, -// ctx.program -// ) - modified = transforms.resolveIndirectCallsUsingVSA( - result.vsaResult, - ctx.program - ) + + /* + modified = transforms.SteensgaardIndirectCallResolution( + ctx.program, + result.steensgaardResults, + result.reachingDefs + ).resolveIndirectCalls() + */ + + modified = transforms.VSAIndirectCallResolution( + ctx.program, + result.vsaResult + ).resolveIndirectCalls() Logger.debug("[!] Generating Procedure Summaries") if (config.summariseProcedures) { @@ -600,7 +601,7 @@ object RunUtils { // should later move this to be inside while (modified) loop and have splitting threads cause further iterations if (config.threadSplit) { - transforms.splitThreads(ctx.program, analysisResult.last.steensgaardResults, analysisResult.last.memoryRegionContents, analysisResult.last.reachingDefs) + transforms.splitThreads(ctx.program, analysisResult.last.steensgaardResults, analysisResult.last.reachingDefs) } Logger.debug("[!] 
Running Writes To") @@ -623,7 +624,6 @@ object RunUtils { val dsa = DataStructureAnalysis(ctx.program, symResults, analysisResult.last.IRconstPropResult, symbolTableEntries, ctx.globalOffsets, ctx.externalFunctions, reachingDefs, writesTo, analysisResult.last.paramResults) dsa.analyze() - config.analysisDotPath.foreach { s => dsa.topDown(ctx.program.mainProcedure).toDot writeToFile(dsa.topDown(ctx.program.mainProcedure).toDot, s"${s}_main_dsg.dot") From dabf9f92005aeb2fe844463379c57da848fbd512 Mon Sep 17 00:00:00 2001 From: l-kent Date: Wed, 30 Oct 2024 10:55:45 +1000 Subject: [PATCH 097/104] remove mutability in DataRegion case class --- src/main/scala/analysis/MemoryModelMap.scala | 12 ++++++------ .../scala/ir/transforms/IndirectCallResolution.scala | 7 ++++--- src/main/scala/util/RunUtils.scala | 3 ++- 3 files changed, 12 insertions(+), 10 deletions(-) diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index 94f7b78a4..605ae0f53 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -37,6 +37,7 @@ class MemoryModelMap(val globalOffsets: Map[BigInt, BigInt]) { private val stackAllocationSites: mutable.Map[CFGPosition, Set[StackRegion]] = mutable.Map() private val uf = UnionFind() + val relfContent: mutable.Map[DataRegion, mutable.Set[String]] = mutable.Map() /** Add a range and object to the mapping * @@ -160,9 +161,9 @@ class MemoryModelMap(val globalOffsets: Map[BigInt, BigInt]) { } else { val isRelocated = relocatedDataRegion(dr.start) if (isRelocated.isDefined) { - obj.get.relfContent.add(isRelocated.get.regionIdentifier) + relfContent(obj.get) = relfContent.getOrElse(obj.get, mutable.Set()) += isRelocated.get.regionIdentifier } else { - obj.get.relfContent.add(dr.regionIdentifier) + relfContent(obj.get) = relfContent.getOrElse(obj.get, mutable.Set()) += dr.regionIdentifier } } } @@ -375,7 +376,7 @@ class MemoryModelMap(val globalOffsets: Map[BigInt, BigInt]) { n match { case directCall: DirectCall => Set(getHeap(directCall)) - case _ => + case _ => getStack(n) ++ getData(n) } } @@ -396,9 +397,8 @@ case class HeapRegion(override val regionIdentifier: String, override val start: } case class DataRegion(override val regionIdentifier: String, override val start: BigInt, size: BigInt) extends MemoryRegion { - override def toString: String = s"Data($regionIdentifier, $start, $size, ($relfContent))" - def end: BigInt = start + size - 1 - val relfContent: mutable.Set[String] = mutable.Set[String]() + override def toString: String = s"Data($regionIdentifier, $start, $size)" + val end: BigInt = start + size - 1 } class UnionFind { diff --git a/src/main/scala/ir/transforms/IndirectCallResolution.scala b/src/main/scala/ir/transforms/IndirectCallResolution.scala index 1aee9a483..a643062cb 100644 --- a/src/main/scala/ir/transforms/IndirectCallResolution.scala +++ b/src/main/scala/ir/transforms/IndirectCallResolution.scala @@ -1,6 +1,6 @@ package ir.transforms -import analysis.{AddressValue, DataRegion, Lift, LiftedElement, LiteralValue, MemoryRegion, RegisterWrapperEqualSets, StackRegion, Value, getUse} +import analysis.{AddressValue, DataRegion, Lift, LiftedElement, LiteralValue, MemoryModelMap, MemoryRegion, RegisterWrapperEqualSets, StackRegion, Value, getUse} import ir.* import util.Logger @@ -70,7 +70,8 @@ class SteensgaardIndirectCallResolution( class VSAIndirectCallResolution( override val program: Program, - val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | 
MemoryRegion, Set[Value]]]] + val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]], + val mmm: MemoryModelMap ) extends IndirectCallResolution { private def searchRegion(memoryRegion: MemoryRegion, n: CFGPosition): Set[String] = { @@ -89,7 +90,7 @@ class VSAIndirectCallResolution( case _: StackRegion => names case dataRegion: DataRegion => - names ++ dataRegion.relfContent + names ++ mmm.relfContent.getOrElse(dataRegion, Set()) case _ => Set() } diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 49c3e4cdd..704f0dfda 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -584,7 +584,8 @@ object RunUtils { modified = transforms.VSAIndirectCallResolution( ctx.program, - result.vsaResult + result.vsaResult, + result.mmmResults ).resolveIndirectCalls() Logger.debug("[!] Generating Procedure Summaries") From af29cc18f578526ba7fac018829b861272f12eb7 Mon Sep 17 00:00:00 2001 From: l-kent Date: Wed, 30 Oct 2024 14:31:39 +1000 Subject: [PATCH 098/104] remove mutability in StackRegion case class --- src/main/scala/analysis/MemoryModelMap.scala | 11 +++++------ src/main/scala/analysis/MemoryRegionAnalysis.scala | 10 ++++++---- src/main/scala/util/RunUtils.scala | 2 +- 3 files changed, 12 insertions(+), 11 deletions(-) diff --git a/src/main/scala/analysis/MemoryModelMap.scala b/src/main/scala/analysis/MemoryModelMap.scala index 605ae0f53..0df82014c 100644 --- a/src/main/scala/analysis/MemoryModelMap.scala +++ b/src/main/scala/analysis/MemoryModelMap.scala @@ -38,6 +38,7 @@ class MemoryModelMap(val globalOffsets: Map[BigInt, BigInt]) { private val uf = UnionFind() val relfContent: mutable.Map[DataRegion, mutable.Set[String]] = mutable.Map() + val stackSubAccesses: mutable.Map[StackRegion, mutable.Set[BigInt]] = mutable.Map() /** Add a range and object to the mapping * @@ -51,10 +52,9 @@ class MemoryModelMap(val globalOffsets: Map[BigInt, BigInt]) { r match { case DataRegion(_, start, size) => start + size case _: HeapRegion => ??? - case StackRegion(_, start, _) => - if (r.subAccesses.nonEmpty) { - val max = start + r.subAccesses.max - max + case s: StackRegion => + if (stackSubAccesses.contains(s) && stackSubAccesses(s).nonEmpty) { + s.start + stackSubAccesses(s).max } else { ??? 
          }
      }
@@ -385,11 +385,10 @@ class MemoryModelMap(val globalOffsets: Map[BigInt, BigInt]) {
 trait MemoryRegion {
   val regionIdentifier: String
   val start: BigInt
-  val subAccesses: mutable.Set[BigInt] = mutable.Set()
 }
 
 case class StackRegion(override val regionIdentifier: String, override val start: BigInt, parent: Procedure) extends MemoryRegion {
-  override def toString: String = s"Stack($regionIdentifier, $start, ${parent.name}, $subAccesses)"
+  override def toString: String = s"Stack($regionIdentifier, $start, ${parent.name})"
 }
 
 case class HeapRegion(override val regionIdentifier: String, override val start: BigInt, size: BigInt, parent: Procedure) extends MemoryRegion {
diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala
index a37f6c7be..5f8e07560 100644
--- a/src/main/scala/analysis/MemoryRegionAnalysis.scala
+++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala
@@ -17,7 +17,8 @@ trait MemoryRegionAnalysis(val program: Program,
                            val ANRResult: Map[CFGPosition, Set[Variable]],
                            val RNAResult: Map[CFGPosition, Set[Variable]],
                            val reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])],
-                           val graResult: Map[CFGPosition, Set[DataRegion]]) {
+                           val graResult: Map[CFGPosition, Set[DataRegion]],
+                           val mmm: MemoryModelMap) {
 
   private var mallocCount: BigInt = 0
   private var stackCount: Int = 0
@@ -53,7 +54,7 @@ trait MemoryRegionAnalysis(val program: Program,
         stackPool += (base -> newRegion)
         newRegion
     }
-    region.subAccesses.add((subAccess.toDouble/8).ceil.toInt)
+    mmm.stackSubAccesses(region) = mmm.stackSubAccesses.getOrElse(region, mutable.Set()) += (subAccess.toDouble/8).ceil.toInt
     region
   }
 
@@ -252,8 +253,9 @@ class MemoryRegionAnalysisSolver(
     ANRResult: Map[CFGPosition, Set[Variable]],
     RNAResult: Map[CFGPosition, Set[Variable]],
     reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])],
-    graResult: Map[CFGPosition, Set[DataRegion]]
-  ) extends MemoryRegionAnalysis(program, domain, globals, globalOffsets, subroutines, constantProp, ANRResult, RNAResult, reachingDefs, graResult)
+    graResult: Map[CFGPosition, Set[DataRegion]],
+    mmm: MemoryModelMap
+  ) extends MemoryRegionAnalysis(program, domain, globals, globalOffsets, subroutines, constantProp, ANRResult, RNAResult, reachingDefs, graResult, mmm)
    with IRIntraproceduralForwardDependencies
    with Analysis[Map[CFGPosition, Set[StackRegion]]]
    with SimpleWorklistFixpointSolver[CFGPosition, Set[StackRegion], PowersetLattice[StackRegion]]
\ No newline at end of file
diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala
index 704f0dfda..e12698d9d 100644
--- a/src/main/scala/util/RunUtils.scala
+++ b/src/main/scala/util/RunUtils.scala
@@ -391,7 +391,7 @@ object StaticAnalysis {
     val graResult = graSolver.analyze()
 
     Logger.debug("[!] 
Running MRA") - val mraSolver = MemoryRegionAnalysisSolver(IRProgram, domain.toSet, globalAddresses, globalOffsets, mergedSubroutines, constPropResult, ANRResult, RNAResult, reachingDefinitionsAnalysisResults, graResult) + val mraSolver = MemoryRegionAnalysisSolver(IRProgram, domain.toSet, globalAddresses, globalOffsets, mergedSubroutines, constPropResult, ANRResult, RNAResult, reachingDefinitionsAnalysisResults, graResult, mmm) val mraResult = mraSolver.analyze() config.analysisDotPath.foreach(s => { From 250d9c754c0f7372a3d990ea45eaeb014fcbeacb Mon Sep 17 00:00:00 2001 From: l-kent Date: Wed, 30 Oct 2024 14:38:29 +1000 Subject: [PATCH 099/104] add --memory-regions flag to control whether regions are added to Boogie --- src/main/scala/Main.scala | 6 ++++-- src/main/scala/util/BASILConfig.scala | 3 ++- src/main/scala/util/RunUtils.scala | 13 +++++++++---- 3 files changed, 15 insertions(+), 7 deletions(-) diff --git a/src/main/scala/Main.scala b/src/main/scala/Main.scala index 498c6eacb..bdadc8968 100644 --- a/src/main/scala/Main.scala +++ b/src/main/scala/Main.scala @@ -49,7 +49,9 @@ object Main { @arg(name = "threads", short = 't', doc = "Separates threads into multiple .bpl files with given output filename as prefix (requires --analyse flag)") threadSplit: Flag, @arg(name = "summarise-procedures", doc = "Generates summaries of procedures which are used in pre/post-conditions (requires --analyse flag)") - summariseProcedures: Flag + summariseProcedures: Flag, + @arg(name = "memory-regions", doc = "Performs static analysis to separate memory into discrete regions in Boogie output (requires --analyse flag)") + memoryRegions: Flag ) def main(args: Array[String]): Unit = { @@ -82,7 +84,7 @@ object Main { val q = BASILConfig( loading = ILLoadingConfig(conf.inputFileName, conf.relfFileName, conf.specFileName, conf.dumpIL, conf.mainProcedureName, conf.procedureDepth), runInterpret = conf.interpret.value, - staticAnalysis = if conf.analyse.value then Some(StaticAnalysisConfig(conf.dumpIL, conf.analysisResults, conf.analysisResultsDot, conf.threadSplit.value, conf.summariseProcedures.value)) else None, + staticAnalysis = if conf.analyse.value then Some(StaticAnalysisConfig(conf.dumpIL, conf.analysisResults, conf.analysisResultsDot, conf.threadSplit.value, conf.summariseProcedures.value, conf.memoryRegions.value)) else None, boogieTranslation = BoogieGeneratorConfig(if conf.lambdaStores.value then BoogieMemoryAccessMode.LambdaStoreSelect else BoogieMemoryAccessMode.SuccessiveStoreSelect, true, rely, conf.threadSplit.value), outputPrefix = conf.outFileName, diff --git a/src/main/scala/util/BASILConfig.scala b/src/main/scala/util/BASILConfig.scala index b323e4026..9e4d22c66 100644 --- a/src/main/scala/util/BASILConfig.scala +++ b/src/main/scala/util/BASILConfig.scala @@ -19,7 +19,8 @@ case class StaticAnalysisConfig(dumpILToPath: Option[String] = None, analysisResultsPath: Option[String] = None, analysisDotPath: Option[String] = None, threadSplit: Boolean = false, - summariseProcedures: Boolean = false) + summariseProcedures: Boolean = false, + memoryRegions: Boolean = false) enum BoogieMemoryAccessMode: case SuccessiveStoreSelect, LambdaStoreSelect diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index e12698d9d..26a9afb34 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -62,7 +62,7 @@ case class StaticAnalysisContext( mmmResults: MemoryModelMap, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, 
Set[Assign]])], varDepsSummaries: Map[Procedure, Map[Taintable, Set[Taintable]]], - regionInjector: RegionInjector, + regionInjector: Option[RegionInjector], symbolicAddresses: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], localDSA: Map[Procedure, Graph], bottomUpDSA: Map[Procedure, Graph], @@ -438,8 +438,13 @@ object StaticAnalysis { }) Logger.debug("[!] Injecting regions") - val regionInjector = RegionInjector(IRProgram, mmm) - regionInjector.nodeVisitor() + val regionInjector = if (config.memoryRegions) { + val injector = RegionInjector(IRProgram, mmm) + injector.nodeVisitor() + Some(injector) + } else { + None + } val paramResults: Map[Procedure, Set[Variable]] = ParamAnalysis(IRProgram).analyze() val interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]] = InterLiveVarsAnalysis(IRProgram).analyze() @@ -543,7 +548,7 @@ object RunUtils { Logger.debug("[!] Translating to Boogie") - val regionInjector = analysis.map(a => a.regionInjector) + val regionInjector = analysis.flatMap(a => a.regionInjector) val boogiePrograms = if (q.boogieTranslation.threadSplit && ctx.program.threads.nonEmpty) { val outPrograms = ArrayBuffer[BProgram]() From d1c0870484e2de29a1a8a32b2df39571cd6c3098 Mon Sep 17 00:00:00 2001 From: l-kent Date: Wed, 30 Oct 2024 14:45:44 +1000 Subject: [PATCH 100/104] don't do multiple iterations unless needed for regions --- src/main/scala/util/RunUtils.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 26a9afb34..899919d82 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -573,7 +573,7 @@ object RunUtils { var iteration = 1 var modified: Boolean = true val analysisResult = mutable.ArrayBuffer[StaticAnalysisContext]() - while (modified || analysisResult.size < 2) { + while (modified || (analysisResult.size < 2 && config.memoryRegions)) { Logger.debug("[!] 
Running Static Analysis") val result = StaticAnalysis.analyse(ctx, config, iteration, analysisResult.lastOption) analysisResult.append(result) From c6e5d8a4f2365c7509b5b66576c2fd07d8b34555 Mon Sep 17 00:00:00 2001 From: l-kent Date: Wed, 30 Oct 2024 14:57:59 +1000 Subject: [PATCH 101/104] fix issue with L function when analysis is off --- src/main/scala/translating/IRToBoogie.scala | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/main/scala/translating/IRToBoogie.scala b/src/main/scala/translating/IRToBoogie.scala index bd68813e7..1c917d038 100644 --- a/src/main/scala/translating/IRToBoogie.scala +++ b/src/main/scala/translating/IRToBoogie.scala @@ -405,7 +405,11 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti } */ IfThenElse(guard, LPred, ite) } - val params = (body.params - indexVar).toList.sorted + val params = if (regionInjector.isDefined) { + (body.params - indexVar).toList.sorted + } else { + List(BParam("mem$in", mem.bType)) + } BFunction("L", params :+ indexVar, BParam(BoolBType), Some(body), List(externAttr)) case b: ByteExtract => val valueVar = BParam("value", BitVecBType(b.valueSize)) From dc1668ed2f2fba334e134b6983306697e94e689d Mon Sep 17 00:00:00 2001 From: l-kent Date: Wed, 30 Oct 2024 15:17:22 +1000 Subject: [PATCH 102/104] fix issue with initialised memory and stack when analysis is enabled --- src/main/scala/util/RunUtils.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 899919d82..0dec78187 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -217,7 +217,7 @@ object IRTransform { * add in modifies from the spec. */ def prepareForTranslation(config: BASILConfig, ctx: IRContext): Unit = { - if (config.staticAnalysis.isEmpty) { + if (!config.staticAnalysis.isDefined || !config.staticAnalysis.get.memoryRegions) { ctx.program.determineRelevantMemory(ctx.globalOffsets) } @@ -227,7 +227,7 @@ object IRTransform { Logger.debug( s"[!] 
Removed ${before - ctx.program.procedures.size} functions (${ctx.program.procedures.size} remaining)" ) - val dupProcNames = (ctx.program.procedures.groupBy(_.name).filter((n,p) => p.size > 1)).toList.flatMap(_._2) + val dupProcNames = ctx.program.procedures.groupBy(_.name).filter((_, p) => p.size > 1).toList.flatMap(_(1)) var dupCounter = 0 for (p <- dupProcNames) { @@ -235,7 +235,7 @@ object IRTransform { p.name = p.name + "$" + p.address.map(_.toString).getOrElse(dupCounter.toString) } - if (config.staticAnalysis.isEmpty) { + if (!config.staticAnalysis.isDefined || !config.staticAnalysis.get.memoryRegions) { val stackIdentification = StackSubstituter() stackIdentification.visitProgram(ctx.program) } From 19dcdb095c17f149460d028a18a4469d2a791173 Mon Sep 17 00:00:00 2001 From: l-kent Date: Wed, 30 Oct 2024 15:27:51 +1000 Subject: [PATCH 103/104] update expected --- .../clang/arrays_simple.expected | 23 +-- .../clang/arrays_simple_gtirb.expected | 23 +-- .../clang/basic_arrays_read.expected | 22 +-- .../clang/basic_arrays_read_gtirb.expected | 22 +-- .../clang_pic/basic_arrays_read.expected | 34 ++-- .../basic_arrays_read_gtirb.expected | 34 ++-- .../gcc/basic_arrays_read.expected | 22 +-- .../gcc/basic_arrays_read_gtirb.expected | 22 +-- .../gcc_pic/basic_arrays_read.expected | 40 ++--- .../gcc_pic/basic_arrays_read_gtirb.expected | 40 ++--- .../clang/basic_arrays_write.expected | 21 +-- .../clang/basic_arrays_write_gtirb.expected | 21 +-- .../clang_O2/basic_arrays_write.expected | 21 +-- .../basic_arrays_write_gtirb.expected | 21 +-- .../clang_pic/basic_arrays_write.expected | 33 ++-- .../basic_arrays_write_gtirb.expected | 33 ++-- .../gcc/basic_arrays_write.expected | 21 +-- .../gcc/basic_arrays_write_gtirb.expected | 21 +-- .../gcc_O2/basic_arrays_write.expected | 21 +-- .../gcc_O2/basic_arrays_write_gtirb.expected | 21 +-- .../gcc_pic/basic_arrays_write.expected | 39 ++--- .../gcc_pic/basic_arrays_write_gtirb.expected | 39 ++--- .../clang/basic_assign_assign.expected | 21 +-- .../clang/basic_assign_assign_gtirb.expected | 21 +-- .../clang_pic/basic_assign_assign.expected | 33 ++-- .../basic_assign_assign_gtirb.expected | 33 ++-- .../gcc/basic_assign_assign.expected | 21 +-- .../gcc/basic_assign_assign_gtirb.expected | 21 +-- .../gcc_O2/basic_assign_assign.expected | 21 +-- .../gcc_O2/basic_assign_assign_gtirb.expected | 21 +-- .../gcc_pic/basic_assign_assign.expected | 39 ++--- .../basic_assign_assign_gtirb.expected | 39 ++--- .../clang/basic_assign_increment.expected | 21 +-- .../basic_assign_increment_gtirb.expected | 21 +-- .../clang_pic/basic_assign_increment.expected | 33 ++-- .../basic_assign_increment_gtirb.expected | 33 ++-- .../gcc/basic_assign_increment.expected | 21 +-- .../gcc/basic_assign_increment_gtirb.expected | 21 +-- .../gcc_O2/basic_assign_increment.expected | 21 +-- .../basic_assign_increment_gtirb.expected | 21 +-- .../gcc_pic/basic_assign_increment.expected | 39 ++--- .../basic_assign_increment_gtirb.expected | 39 ++--- .../clang/basic_function_call_caller.expected | 33 +--- .../basic_function_call_caller_gtirb.expected | 33 +--- .../basic_function_call_caller.expected | 23 +-- .../basic_function_call_caller_gtirb.expected | 23 +-- .../basic_function_call_caller.expected | 63 +++---- .../basic_function_call_caller_gtirb.expected | 63 +++---- .../gcc/basic_function_call_caller.expected | 33 +--- .../basic_function_call_caller_gtirb.expected | 33 +--- .../basic_function_call_caller.expected | 23 +-- .../basic_function_call_caller_gtirb.expected | 23 +-- 
.../basic_function_call_caller.expected | 73 +++----- .../basic_function_call_caller_gtirb.expected | 73 +++----- .../clang/basic_function_call_reader.expected | 23 +-- .../basic_function_call_reader_gtirb.expected | 23 +-- .../basic_function_call_reader.expected | 23 +-- .../basic_function_call_reader_gtirb.expected | 23 +-- .../basic_function_call_reader.expected | 41 ++--- .../basic_function_call_reader_gtirb.expected | 41 ++--- .../gcc/basic_function_call_reader.expected | 23 +-- .../basic_function_call_reader_gtirb.expected | 23 +-- .../basic_function_call_reader.expected | 23 +-- .../basic_function_call_reader_gtirb.expected | 23 +-- .../basic_function_call_reader.expected | 47 ++--- .../basic_function_call_reader_gtirb.expected | 47 ++--- .../clang/basic_lock_read.expected | 21 +-- .../clang/basic_lock_read_gtirb.expected | 21 +-- .../clang_O2/basic_lock_read.expected | 21 +-- .../clang_O2/basic_lock_read_gtirb.expected | 21 +-- .../clang_pic/basic_lock_read.expected | 39 ++--- .../clang_pic/basic_lock_read_gtirb.expected | 39 ++--- .../gcc/basic_lock_read.expected | 21 +-- .../gcc/basic_lock_read_gtirb.expected | 21 +-- .../gcc_O2/basic_lock_read.expected | 21 +-- .../gcc_O2/basic_lock_read_gtirb.expected | 21 +-- .../gcc_pic/basic_lock_read.expected | 45 ++--- .../gcc_pic/basic_lock_read_gtirb.expected | 45 ++--- .../clang/basic_lock_security_read.expected | 23 +-- .../basic_lock_security_read_gtirb.expected | 23 +-- .../basic_lock_security_read.expected | 23 +-- .../basic_lock_security_read_gtirb.expected | 23 +-- .../basic_lock_security_read.expected | 41 ++--- .../basic_lock_security_read_gtirb.expected | 41 ++--- .../gcc/basic_lock_security_read.expected | 23 +-- .../basic_lock_security_read_gtirb.expected | 23 +-- .../gcc_O2/basic_lock_security_read.expected | 23 +-- .../basic_lock_security_read_gtirb.expected | 23 +-- .../gcc_pic/basic_lock_security_read.expected | 47 ++--- .../basic_lock_security_read_gtirb.expected | 47 ++--- .../clang/basic_lock_security_write.expected | 23 +-- .../basic_lock_security_write_gtirb.expected | 23 +-- .../basic_lock_security_write.expected | 23 +-- .../basic_lock_security_write_gtirb.expected | 23 +-- .../basic_lock_security_write.expected | 41 ++--- .../basic_lock_security_write_gtirb.expected | 41 ++--- .../gcc/basic_lock_security_write.expected | 31 +--- .../basic_lock_security_write_gtirb.expected | 31 +--- .../gcc_O2/basic_lock_security_write.expected | 27 +-- .../basic_lock_security_write_gtirb.expected | 27 +-- .../basic_lock_security_write.expected | 55 +++--- .../basic_lock_security_write_gtirb.expected | 55 +++--- .../clang/basic_lock_unlock.expected | 25 +-- .../clang/basic_lock_unlock_gtirb.expected | 25 +-- .../clang_pic/basic_lock_unlock.expected | 43 ++--- .../basic_lock_unlock_gtirb.expected | 43 ++--- .../gcc/basic_lock_unlock.expected | 25 +-- .../gcc/basic_lock_unlock_gtirb.expected | 25 +-- .../gcc_O2/basic_lock_unlock.expected | 25 +-- .../gcc_O2/basic_lock_unlock_gtirb.expected | 25 +-- .../gcc_pic/basic_lock_unlock.expected | 49 ++---- .../gcc_pic/basic_lock_unlock_gtirb.expected | 49 ++---- .../clang/basic_loop_assign.expected | 21 +-- .../clang/basic_loop_assign_gtirb.expected | 21 +-- .../clang_pic/basic_loop_assign.expected | 33 ++-- .../basic_loop_assign_gtirb.expected | 33 ++-- .../gcc/basic_loop_assign.expected | 21 +-- .../gcc/basic_loop_assign_gtirb.expected | 21 +-- .../gcc_O2/basic_loop_assign.expected | 21 +-- .../gcc_O2/basic_loop_assign_gtirb.expected | 21 +-- .../gcc_pic/basic_loop_assign.expected | 39 
++--- .../gcc_pic/basic_loop_assign_gtirb.expected | 39 ++--- .../clang/basic_operation_evaluation.expected | 23 +-- .../basic_operation_evaluation_gtirb.expected | 23 +-- .../gcc/basic_operation_evaluation.expected | 23 +-- .../basic_operation_evaluation_gtirb.expected | 23 +-- .../clang/basic_sec_policy_read.expected | 23 +-- .../basic_sec_policy_read_gtirb.expected | 23 +-- .../clang_O2/basic_sec_policy_read.expected | 23 +-- .../basic_sec_policy_read_gtirb.expected | 23 +-- .../clang_pic/basic_sec_policy_read.expected | 41 ++--- .../basic_sec_policy_read_gtirb.expected | 41 ++--- .../gcc/basic_sec_policy_read.expected | 23 +-- .../gcc/basic_sec_policy_read_gtirb.expected | 23 +-- .../gcc_O2/basic_sec_policy_read.expected | 23 +-- .../basic_sec_policy_read_gtirb.expected | 23 +-- .../gcc_pic/basic_sec_policy_read.expected | 47 ++--- .../basic_sec_policy_read_gtirb.expected | 47 ++--- .../clang/basic_sec_policy_write.expected | 23 +-- .../basic_sec_policy_write_gtirb.expected | 23 +-- .../clang_O2/basic_sec_policy_write.expected | 23 +-- .../basic_sec_policy_write_gtirb.expected | 23 +-- .../clang_pic/basic_sec_policy_write.expected | 41 ++--- .../basic_sec_policy_write_gtirb.expected | 41 ++--- .../gcc/basic_sec_policy_write.expected | 23 +-- .../gcc/basic_sec_policy_write_gtirb.expected | 23 +-- .../gcc_O2/basic_sec_policy_write.expected | 23 +-- .../basic_sec_policy_write_gtirb.expected | 23 +-- .../gcc_pic/basic_sec_policy_write.expected | 47 ++--- .../basic_sec_policy_write_gtirb.expected | 47 ++--- .../clang/basicassign_gamma0.expected | 21 +-- .../clang/basicassign_gamma0_gtirb.expected | 21 +-- .../clang_pic/basicassign_gamma0.expected | 39 ++--- .../basicassign_gamma0_gtirb.expected | 39 ++--- .../gcc/basicassign_gamma0.expected | 21 +-- .../gcc/basicassign_gamma0_gtirb.expected | 21 +-- .../gcc_O2/basicassign_gamma0.expected | 21 +-- .../gcc_O2/basicassign_gamma0_gtirb.expected | 21 +-- .../gcc_pic/basicassign_gamma0.expected | 45 ++--- .../gcc_pic/basicassign_gamma0_gtirb.expected | 45 ++--- .../basicfree/clang/basicfree.expected | 49 ++---- .../basicfree/clang/basicfree_gtirb.expected | 69 ++------ .../correct/basicfree/gcc/basicfree.expected | 49 ++---- .../basicfree/gcc/basicfree_gtirb.expected | 69 ++------ src/test/correct/cjump/clang/cjump.expected | 25 +-- .../correct/cjump/clang/cjump_gtirb.expected | 25 +-- .../correct/cjump/clang_pic/cjump.expected | 43 ++--- .../cjump/clang_pic/cjump_gtirb.expected | 43 ++--- src/test/correct/cjump/gcc/cjump.expected | 25 +-- .../correct/cjump/gcc/cjump_gtirb.expected | 25 +-- src/test/correct/cjump/gcc_pic/cjump.expected | 49 ++---- .../cjump/gcc_pic/cjump_gtirb.expected | 49 ++---- .../clang/floatingpoint_gtirb.expected | 36 ++-- .../gcc/floatingpoint_gtirb.expected | 36 ++-- .../correct/function/clang/function.expected | 39 ++--- .../function/clang/function_gtirb.expected | 39 ++--- .../function/clang_pic/function.expected | 69 +++----- .../clang_pic/function_gtirb.expected | 69 +++----- .../correct/function/gcc/function.expected | 39 ++--- .../function/gcc/function_gtirb.expected | 39 ++--- .../function/gcc_pic/function.expected | 79 ++++----- .../function/gcc_pic/function_gtirb.expected | 79 ++++----- .../function1/clang/function1.expected | 6 +- .../function1/clang/function1_gtirb.expected | 6 +- .../function1/clang_O2/function1.expected | 6 +- .../clang_O2/function1_gtirb.expected | 6 +- .../correct/function1/gcc/function1.expected | 6 +- .../function1/gcc/function1_gtirb.expected | 6 +- 
.../function1/gcc_O2/function1.expected | 6 +- .../function1/gcc_O2/function1_gtirb.expected | 6 +- .../clang/functionpointer_gtirb.expected | 55 ++---- .../clang_pic/functionpointer_gtirb.expected | 145 ++++++---------- .../gcc/functionpointer_gtirb.expected | 55 ++---- .../gcc_O2/functionpointer_gtirb.expected | 39 ++--- .../gcc_pic/functionpointer_gtirb.expected | 163 +++++++----------- .../clang/functions_with_params.expected | 33 +--- .../functions_with_params_gtirb.expected | 33 +--- .../gcc/functions_with_params.expected | 33 +--- .../gcc/functions_with_params_gtirb.expected | 33 +--- .../ifbranches/clang/ifbranches.expected | 23 +-- .../clang/ifbranches_gtirb.expected | 23 +-- .../ifbranches/clang_O2/ifbranches.expected | 27 +-- .../clang_O2/ifbranches_gtirb.expected | 27 +-- .../ifbranches/gcc/ifbranches.expected | 23 +-- .../ifbranches/gcc/ifbranches_gtirb.expected | 23 +-- .../ifbranches/gcc_O2/ifbranches.expected | 27 +-- .../gcc_O2/ifbranches_gtirb.expected | 27 +-- .../correct/ifglobal/clang/ifglobal.expected | 25 +-- .../ifglobal/clang/ifglobal_gtirb.expected | 25 +-- .../ifglobal/clang_O2/ifglobal.expected | 25 +-- .../ifglobal/clang_O2/ifglobal_gtirb.expected | 25 +-- .../ifglobal/clang_pic/ifglobal.expected | 37 ++-- .../clang_pic/ifglobal_gtirb.expected | 37 ++-- .../correct/ifglobal/gcc/ifglobal.expected | 25 +-- .../ifglobal/gcc/ifglobal_gtirb.expected | 25 +-- .../correct/ifglobal/gcc_O2/ifglobal.expected | 25 +-- .../ifglobal/gcc_O2/ifglobal_gtirb.expected | 25 +-- .../ifglobal/gcc_pic/ifglobal.expected | 43 ++--- .../ifglobal/gcc_pic/ifglobal_gtirb.expected | 43 ++--- .../clang/indirect_call_gtirb.expected | 6 +- .../clang_pic/indirect_call_gtirb.expected | 42 ++--- .../gcc/indirect_call_gtirb.expected | 6 +- .../gcc_pic/indirect_call_gtirb.expected | 78 ++++----- .../clang/initialisation.expected | 21 +-- .../clang/initialisation_gtirb.expected | 21 +-- .../clang_O2/initialisation.expected | 21 +-- .../clang_O2/initialisation_gtirb.expected | 21 +-- .../clang_pic/initialisation.expected | 51 +++--- .../clang_pic/initialisation_gtirb.expected | 51 +++--- .../gcc/initialisation.expected | 21 +-- .../gcc/initialisation_gtirb.expected | 21 +-- .../gcc_O2/initialisation.expected | 21 +-- .../gcc_O2/initialisation_gtirb.expected | 21 +-- .../gcc_pic/initialisation.expected | 51 +++--- .../gcc_pic/initialisation_gtirb.expected | 51 +++--- .../clang/jumptable2_gtirb.expected | 145 ++++++---------- .../clang_O2/jumptable2_gtirb.expected | 145 ++++++---------- .../clang_pic/jumptable2_gtirb.expected | 145 ++++++---------- .../jumptable2/gcc/jumptable2_gtirb.expected | 109 +++++------- .../gcc_O2/jumptable2_gtirb.expected | 109 +++++------- .../gcc_pic/jumptable2_gtirb.expected | 145 ++++++---------- .../clang/malloc_with_local.expected | 6 +- .../clang/malloc_with_local_gtirb.expected | 6 +- .../clang_O2/malloc_with_local.expected | 4 +- .../clang_O2/malloc_with_local_gtirb.expected | 6 +- .../gcc/malloc_with_local.expected | 6 +- .../gcc/malloc_with_local_gtirb.expected | 6 +- .../gcc_O2/malloc_with_local.expected | 4 +- .../gcc_O2/malloc_with_local_gtirb.expected | 6 +- .../clang/malloc_with_local2.expected | 6 +- .../clang/malloc_with_local2_gtirb.expected | 6 +- .../gcc/malloc_with_local2.expected | 6 +- .../gcc/malloc_with_local2_gtirb.expected | 6 +- .../clang/malloc_with_local3.expected | 6 +- .../clang/malloc_with_local3_gtirb.expected | 6 +- .../gcc/malloc_with_local3.expected | 6 +- .../gcc/malloc_with_local3_gtirb.expected | 6 +- 
.../gcc_O2/malloc_with_local3.expected | 8 +- .../gcc_O2/malloc_with_local3_gtirb.expected | 6 +- .../multi_malloc/clang/multi_malloc.expected | 6 +- .../clang/multi_malloc_gtirb.expected | 6 +- .../multi_malloc/gcc/multi_malloc.expected | 6 +- .../gcc/multi_malloc_gtirb.expected | 6 +- .../clang/no_interference_update_x.expected | 21 +-- .../no_interference_update_x_gtirb.expected | 21 +-- .../no_interference_update_x.expected | 33 ++-- .../no_interference_update_x_gtirb.expected | 33 ++-- .../gcc/no_interference_update_x.expected | 21 +-- .../no_interference_update_x_gtirb.expected | 21 +-- .../gcc_O2/no_interference_update_x.expected | 21 +-- .../no_interference_update_x_gtirb.expected | 21 +-- .../gcc_pic/no_interference_update_x.expected | 39 ++--- .../no_interference_update_x_gtirb.expected | 39 ++--- .../clang/no_interference_update_y.expected | 21 +-- .../no_interference_update_y_gtirb.expected | 21 +-- .../no_interference_update_y.expected | 33 ++-- .../no_interference_update_y_gtirb.expected | 33 ++-- .../gcc/no_interference_update_y.expected | 21 +-- .../no_interference_update_y_gtirb.expected | 21 +-- .../gcc_O2/no_interference_update_y.expected | 21 +-- .../no_interference_update_y_gtirb.expected | 21 +-- .../gcc_pic/no_interference_update_y.expected | 39 ++--- .../no_interference_update_y_gtirb.expected | 39 ++--- .../secret_write/clang/secret_write.expected | 23 +-- .../clang/secret_write_gtirb.expected | 23 +-- .../clang_O2/secret_write.expected | 23 +-- .../clang_O2/secret_write_gtirb.expected | 23 +-- .../clang_pic/secret_write.expected | 47 ++--- .../clang_pic/secret_write_gtirb.expected | 47 ++--- .../secret_write/gcc/secret_write.expected | 23 +-- .../gcc/secret_write_gtirb.expected | 23 +-- .../secret_write/gcc_O2/secret_write.expected | 23 +-- .../gcc_O2/secret_write_gtirb.expected | 23 +-- .../gcc_pic/secret_write.expected | 53 +++--- .../gcc_pic/secret_write_gtirb.expected | 53 +++--- src/test/correct/switch/clang/switch.expected | 23 +-- .../switch/clang/switch_gtirb.expected | 23 +-- .../correct/switch/clang_O2/switch.expected | 27 +-- .../switch/clang_O2/switch_gtirb.expected | 27 +-- src/test/correct/switch/gcc/switch.expected | 23 +-- .../correct/switch/gcc/switch_gtirb.expected | 23 +-- .../correct/switch/gcc_O2/switch.expected | 27 +-- .../switch/gcc_O2/switch_gtirb.expected | 27 +-- .../correct/syscall/clang/syscall.expected | 33 +--- .../syscall/clang/syscall_gtirb.expected | 45 ++--- .../syscall/clang_O2/syscall_gtirb.expected | 49 ++---- src/test/correct/syscall/gcc/syscall.expected | 33 +--- .../syscall/gcc/syscall_gtirb.expected | 45 ++--- .../correct/syscall/gcc_O2/syscall.expected | 27 +-- .../syscall/gcc_O2/syscall_gtirb.expected | 49 ++---- .../clang/using_gamma_conditional.expected | 23 +-- .../using_gamma_conditional_gtirb.expected | 23 +-- .../clang_O2/using_gamma_conditional.expected | 23 +-- .../using_gamma_conditional_gtirb.expected | 23 +-- .../using_gamma_conditional.expected | 35 ++-- .../using_gamma_conditional_gtirb.expected | 35 ++-- .../gcc/using_gamma_conditional.expected | 23 +-- .../using_gamma_conditional_gtirb.expected | 23 +-- .../gcc_pic/using_gamma_conditional.expected | 41 ++--- .../using_gamma_conditional_gtirb.expected | 41 ++--- .../clang/using_gamma_write_z.expected | 24 +-- .../clang/using_gamma_write_z_gtirb.expected | 24 +-- .../clang_pic/using_gamma_write_z.expected | 36 ++-- .../using_gamma_write_z_gtirb.expected | 36 ++-- .../gcc/using_gamma_write_z.expected | 24 +-- .../gcc/using_gamma_write_z_gtirb.expected | 24 
+-- .../gcc_O2/using_gamma_write_z.expected | 24 +-- .../gcc_O2/using_gamma_write_z_gtirb.expected | 24 +-- .../gcc_pic/using_gamma_write_z.expected | 42 ++--- .../using_gamma_write_z_gtirb.expected | 42 ++--- .../basicassign/clang/basicassign.expected | 21 +-- .../clang/basicassign_gtirb.expected | 21 +-- .../basicassign/clang_O2/basicassign.expected | 21 +-- .../clang_O2/basicassign_gtirb.expected | 21 +-- .../clang_pic/basicassign.expected | 45 ++--- .../clang_pic/basicassign_gtirb.expected | 45 ++--- .../basicassign/gcc/basicassign.expected | 21 +-- .../gcc/basicassign_gtirb.expected | 21 +-- .../basicassign/gcc_O2/basicassign.expected | 21 +-- .../gcc_O2/basicassign_gtirb.expected | 21 +-- .../basicassign/gcc_pic/basicassign.expected | 51 ++---- .../gcc_pic/basicassign_gtirb.expected | 51 ++---- .../basicassign1/clang/basicassign1.expected | 25 +-- .../clang/basicassign1_gtirb.expected | 25 +-- .../clang_O2/basicassign1.expected | 25 +-- .../clang_O2/basicassign1_gtirb.expected | 25 +-- .../clang_pic/basicassign1.expected | 43 ++--- .../clang_pic/basicassign1_gtirb.expected | 43 ++--- .../basicassign1/gcc/basicassign1.expected | 25 +-- .../gcc/basicassign1_gtirb.expected | 25 +-- .../basicassign1/gcc_O2/basicassign1.expected | 25 +-- .../gcc_O2/basicassign1_gtirb.expected | 25 +-- .../gcc_pic/basicassign1.expected | 49 ++---- .../gcc_pic/basicassign1_gtirb.expected | 49 ++---- .../basicassign2/clang/basicassign2.expected | 29 ++-- .../clang/basicassign2_gtirb.expected | 29 ++-- .../clang_O2/basicassign2.expected | 29 ++-- .../clang_O2/basicassign2_gtirb.expected | 29 ++-- .../clang_pic/basicassign2.expected | 47 ++--- .../clang_pic/basicassign2_gtirb.expected | 47 ++--- .../basicassign2/gcc/basicassign2.expected | 29 ++-- .../gcc/basicassign2_gtirb.expected | 29 ++-- .../basicassign2/gcc_O2/basicassign2.expected | 29 ++-- .../gcc_O2/basicassign2_gtirb.expected | 29 ++-- .../gcc_pic/basicassign2.expected | 53 +++--- .../gcc_pic/basicassign2_gtirb.expected | 53 +++--- .../basicassign3/clang/basicassign3.expected | 25 ++- .../clang/basicassign3_gtirb.expected | 25 ++- .../clang_O2/basicassign3.expected | 25 ++- .../clang_O2/basicassign3_gtirb.expected | 25 ++- .../clang_pic/basicassign3.expected | 43 ++--- .../clang_pic/basicassign3_gtirb.expected | 43 ++--- .../basicassign3/gcc/basicassign3.expected | 25 ++- .../gcc/basicassign3_gtirb.expected | 25 ++- .../basicassign3/gcc_O2/basicassign3.expected | 25 ++- .../gcc_O2/basicassign3_gtirb.expected | 25 ++- .../gcc_pic/basicassign3.expected | 49 +++--- .../gcc_pic/basicassign3_gtirb.expected | 49 +++--- .../incorrect/iflocal/clang/iflocal.expected | 23 +-- .../iflocal/clang/iflocal_gtirb.expected | 23 +-- .../incorrect/iflocal/gcc/iflocal.expected | 23 +-- .../iflocal/gcc/iflocal_gtirb.expected | 23 +-- .../clang/nestedifglobal.expected | 25 +-- .../clang/nestedifglobal_gtirb.expected | 25 +-- .../clang_pic/nestedifglobal.expected | 43 ++--- .../clang_pic/nestedifglobal_gtirb.expected | 43 ++--- .../gcc/nestedifglobal.expected | 25 +-- .../gcc/nestedifglobal_gtirb.expected | 25 +-- .../gcc_pic/nestedifglobal.expected | 49 ++---- .../gcc_pic/nestedifglobal_gtirb.expected | 49 ++---- 390 files changed, 3815 insertions(+), 8585 deletions(-) diff --git a/src/test/correct/arrays_simple/clang/arrays_simple.expected b/src/test/correct/arrays_simple/clang/arrays_simple.expected index c24e7d31d..73aa783a4 100644 --- a/src/test/correct/arrays_simple/clang/arrays_simple.expected +++ b/src/test/correct/arrays_simple/clang/arrays_simple.expected @@ 
-27,10 +27,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -38,12 +34,9 @@ function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1872bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1873bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1874bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1875bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1872bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -51,8 +44,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -69,20 +62,14 @@ procedure main(); modifies Gamma_R0, Gamma_R31, Gamma_R8, Gamma_stack, R0, R31, R8, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1872bv64) == 1bv8); - free requires (memory_load8_le(mem, 1873bv64) == 0bv8); - free requires (memory_load8_le(mem, 1874bv64) == 2bv8); - free requires (memory_load8_le(mem, 1875bv64) == 0bv8); + free requires (memory_load32_le(mem, 1872bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1872bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1873bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1874bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1875bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1872bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/arrays_simple/clang/arrays_simple_gtirb.expected b/src/test/correct/arrays_simple/clang/arrays_simple_gtirb.expected index b58ac9c72..eda5cac95 100644 --- a/src/test/correct/arrays_simple/clang/arrays_simple_gtirb.expected +++ b/src/test/correct/arrays_simple/clang/arrays_simple_gtirb.expected @@ -27,10 +27,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 
(memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -38,12 +34,9 @@ function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1872bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1873bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1874bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1875bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1872bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -51,8 +44,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -69,20 +62,14 @@ procedure main(); modifies Gamma_R0, Gamma_R31, Gamma_R8, Gamma_stack, R0, R31, R8, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1872bv64) == 1bv8); - free requires (memory_load8_le(mem, 1873bv64) == 0bv8); - free requires (memory_load8_le(mem, 1874bv64) == 2bv8); - free requires (memory_load8_le(mem, 1875bv64) == 0bv8); + free requires (memory_load32_le(mem, 1872bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1872bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1873bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1874bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1875bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1872bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_arrays_read/clang/basic_arrays_read.expected b/src/test/correct/basic_arrays_read/clang/basic_arrays_read.expected index f4ac60347..1fcc04f91 100644 --- a/src/test/correct/basic_arrays_read/clang/basic_arrays_read.expected +++ b/src/test/correct/basic_arrays_read/clang/basic_arrays_read.expected @@ -10,7 +10,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $arr_addr: bv64; axiom ($arr_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns 
(bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if ((index == bvadd64($arr_addr, 4bv64)) || (index == bvadd64($arr_addr, 0bv64))) then false else false) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -44,10 +40,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(memory_load32_le(mem, bvadd64($arr_addr, 0bv64))) == memory_load32_le(mem, bvadd64($arr_addr, 0bv64))); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -82,20 +75,14 @@ procedure main(); modifies Gamma_R0, Gamma_R31, Gamma_R8, Gamma_mem, Gamma_stack, R0, R31, R8, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1860bv64) == 1bv8); - free requires (memory_load8_le(mem, 1861bv64) == 0bv8); - free requires (memory_load8_le(mem, 1862bv64) == 2bv8); - free requires (memory_load8_le(mem, 1863bv64) == 0bv8); + free requires (memory_load32_le(mem, 1860bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -112,7 +99,6 @@ implementation main() call rely(); assert (L(mem, bvadd64(R8, 52bv64)) ==> true); mem, Gamma_mem := memory_store32_le(mem, bvadd64(R8, 52bv64), 0bv32), gamma_store32(Gamma_mem, bvadd64(R8, 52bv64), true); - assert true; assume {:captureState "%000002dd"} true; call rely(); R0, Gamma_R0 := zero_extend32_32(memory_load32_le(mem, bvadd64(R8, 52bv64))), (gamma_load32(Gamma_mem, bvadd64(R8, 52bv64)) || L(mem, bvadd64(R8, 52bv64))); diff --git a/src/test/correct/basic_arrays_read/clang/basic_arrays_read_gtirb.expected 
b/src/test/correct/basic_arrays_read/clang/basic_arrays_read_gtirb.expected index 8e4bac79d..f4cde0c06 100644 --- a/src/test/correct/basic_arrays_read/clang/basic_arrays_read_gtirb.expected +++ b/src/test/correct/basic_arrays_read/clang/basic_arrays_read_gtirb.expected @@ -10,7 +10,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $arr_addr: bv64; axiom ($arr_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if ((index == bvadd64($arr_addr, 4bv64)) || (index == bvadd64($arr_addr, 0bv64))) then false else false) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -44,10 +40,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(memory_load32_le(mem, bvadd64($arr_addr, 0bv64))) == memory_load32_le(mem, bvadd64($arr_addr, 0bv64))); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -82,20 +75,14 @@ procedure main(); modifies Gamma_R0, Gamma_R31, Gamma_R8, Gamma_mem, Gamma_stack, R0, R31, R8, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1860bv64) == 1bv8); - free requires (memory_load8_le(mem, 1861bv64) == 0bv8); - free requires (memory_load8_le(mem, 1862bv64) == 2bv8); - free requires (memory_load8_le(mem, 1863bv64) == 0bv8); + free requires (memory_load32_le(mem, 1860bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -112,7 +99,6 @@ implementation main() call rely(); assert 
(L(mem, bvadd64(R8, 52bv64)) ==> true); mem, Gamma_mem := memory_store32_le(mem, bvadd64(R8, 52bv64), 0bv32), gamma_store32(Gamma_mem, bvadd64(R8, 52bv64), true); - assert true; assume {:captureState "1824$0"} true; call rely(); R0, Gamma_R0 := zero_extend32_32(memory_load32_le(mem, bvadd64(R8, 52bv64))), (gamma_load32(Gamma_mem, bvadd64(R8, 52bv64)) || L(mem, bvadd64(R8, 52bv64))); diff --git a/src/test/correct/basic_arrays_read/clang_pic/basic_arrays_read.expected b/src/test/correct/basic_arrays_read/clang_pic/basic_arrays_read.expected index 736e8907d..b97a02fe9 100644 --- a/src/test/correct/basic_arrays_read/clang_pic/basic_arrays_read.expected +++ b/src/test/correct/basic_arrays_read/clang_pic/basic_arrays_read.expected @@ -10,7 +10,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $arr_addr: bv64; axiom ($arr_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if ((index == bvadd64($arr_addr, 4bv64)) || (index == bvadd64($arr_addr, 0bv64))) then false else false) } @@ -35,10 +35,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -48,15 +44,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(memory_load32_le(mem, bvadd64($arr_addr, 0bv64))) == memory_load32_le(mem, bvadd64($arr_addr, 0bv64))); - free ensures (memory_load8_le(mem, 1928bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1929bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1930bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1931bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1928bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 1876bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -87,26 +80,20 @@ procedure main(); modifies Gamma_R0, Gamma_R31, Gamma_R8, Gamma_mem, Gamma_stack, R0, R31, R8, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1928bv64) == 1bv8); - free requires (memory_load8_le(mem, 1929bv64) == 0bv8); - free requires (memory_load8_le(mem, 1930bv64) == 2bv8); - free requires (memory_load8_le(mem, 1931bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 1876bv64); + free requires 
(memory_load32_le(mem, 1928bv64) == 131073bv32); free requires (memory_load64_le(mem, 69056bv64) == 1872bv64); free requires (memory_load64_le(mem, 69064bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69584bv64) == 1876bv64); free requires (memory_load64_le(mem, 69592bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1928bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1929bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1930bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1931bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1928bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 1876bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { @@ -121,7 +108,6 @@ implementation main() call rely(); assert (L(mem, R8) ==> true); mem, Gamma_mem := memory_store32_le(mem, R8, 0bv32), gamma_store32(Gamma_mem, R8, true); - assert true; assume {:captureState "%000002e8"} true; call rely(); R0, Gamma_R0 := zero_extend32_32(memory_load32_le(mem, R8)), (gamma_load32(Gamma_mem, R8) || L(mem, R8)); diff --git a/src/test/correct/basic_arrays_read/clang_pic/basic_arrays_read_gtirb.expected b/src/test/correct/basic_arrays_read/clang_pic/basic_arrays_read_gtirb.expected index e65df320f..36598de3f 100644 --- a/src/test/correct/basic_arrays_read/clang_pic/basic_arrays_read_gtirb.expected +++ b/src/test/correct/basic_arrays_read/clang_pic/basic_arrays_read_gtirb.expected @@ -10,7 +10,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $arr_addr: bv64; axiom ($arr_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if ((index == bvadd64($arr_addr, 4bv64)) || (index == bvadd64($arr_addr, 0bv64))) then false else false) } @@ -35,10 +35,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -48,15 +44,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(memory_load32_le(mem, bvadd64($arr_addr, 0bv64))) == memory_load32_le(mem, bvadd64($arr_addr, 0bv64))); - free ensures (memory_load8_le(mem, 1928bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1929bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1930bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1931bv64) == 0bv8); - free 
ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1928bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 1876bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -87,26 +80,20 @@ procedure main(); modifies Gamma_R0, Gamma_R31, Gamma_R8, Gamma_mem, Gamma_stack, R0, R31, R8, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1928bv64) == 1bv8); - free requires (memory_load8_le(mem, 1929bv64) == 0bv8); - free requires (memory_load8_le(mem, 1930bv64) == 2bv8); - free requires (memory_load8_le(mem, 1931bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1928bv64) == 131073bv32); free requires (memory_load64_le(mem, 69056bv64) == 1872bv64); free requires (memory_load64_le(mem, 69064bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69584bv64) == 1876bv64); free requires (memory_load64_le(mem, 69592bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1928bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1929bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1930bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1931bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1928bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 1876bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { @@ -121,7 +108,6 @@ implementation main() call rely(); assert (L(mem, R8) ==> true); mem, Gamma_mem := memory_store32_le(mem, R8, 0bv32), gamma_store32(Gamma_mem, R8, true); - assert true; assume {:captureState "1892$0"} true; call rely(); R0, Gamma_R0 := zero_extend32_32(memory_load32_le(mem, R8)), (gamma_load32(Gamma_mem, R8) || L(mem, R8)); diff --git a/src/test/correct/basic_arrays_read/gcc/basic_arrays_read.expected b/src/test/correct/basic_arrays_read/gcc/basic_arrays_read.expected index cbb6c5f36..7c0e78f1a 100644 --- a/src/test/correct/basic_arrays_read/gcc/basic_arrays_read.expected +++ b/src/test/correct/basic_arrays_read/gcc/basic_arrays_read.expected @@ -4,7 +4,7 @@ var {:extern} R0: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $arr_addr: bv64; axiom ($arr_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if ((index == bvadd64($arr_addr, 4bv64)) || (index == bvadd64($arr_addr, 0bv64))) then false else false) } @@ -25,10 +25,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 
(memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -38,10 +34,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(memory_load32_le(mem, bvadd64($arr_addr, 0bv64))) == memory_load32_le(mem, bvadd64($arr_addr, 0bv64))); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -76,18 +69,12 @@ procedure main(); modifies Gamma_R0, Gamma_mem, R0, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1860bv64) == 1bv8); - free requires (memory_load8_le(mem, 1861bv64) == 0bv8); - free requires (memory_load8_le(mem, 1862bv64) == 2bv8); - free requires (memory_load8_le(mem, 1863bv64) == 0bv8); + free requires (memory_load32_le(mem, 1860bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -102,7 +89,6 @@ implementation main() call rely(); assert (L(mem, R0) ==> true); mem, Gamma_mem := memory_store32_le(mem, R0, 0bv32), gamma_store32(Gamma_mem, R0, true); - assert true; assume {:captureState "%000002d6"} true; R0, Gamma_R0 := 69632bv64, true; R0, Gamma_R0 := bvadd64(R0, 24bv64), Gamma_R0; diff --git a/src/test/correct/basic_arrays_read/gcc/basic_arrays_read_gtirb.expected b/src/test/correct/basic_arrays_read/gcc/basic_arrays_read_gtirb.expected index 5a1213e87..c0bf7949f 100644 --- a/src/test/correct/basic_arrays_read/gcc/basic_arrays_read_gtirb.expected +++ b/src/test/correct/basic_arrays_read/gcc/basic_arrays_read_gtirb.expected @@ -4,7 +4,7 @@ var {:extern} R0: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $arr_addr: bv64; axiom ($arr_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if ((index == 
bvadd64($arr_addr, 4bv64)) || (index == bvadd64($arr_addr, 0bv64))) then false else false) } @@ -25,10 +25,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -38,10 +34,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(memory_load32_le(mem, bvadd64($arr_addr, 0bv64))) == memory_load32_le(mem, bvadd64($arr_addr, 0bv64))); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -76,18 +69,12 @@ procedure main(); modifies Gamma_R0, Gamma_mem, R0, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1860bv64) == 1bv8); - free requires (memory_load8_le(mem, 1861bv64) == 0bv8); - free requires (memory_load8_le(mem, 1862bv64) == 2bv8); - free requires (memory_load8_le(mem, 1863bv64) == 0bv8); + free requires (memory_load32_le(mem, 1860bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -102,7 +89,6 @@ implementation main() call rely(); assert (L(mem, R0) ==> true); mem, Gamma_mem := memory_store32_le(mem, R0, 0bv32), gamma_store32(Gamma_mem, R0, true); - assert true; assume {:captureState "1820$0"} true; R0, Gamma_R0 := 69632bv64, true; R0, Gamma_R0 := bvadd64(R0, 24bv64), Gamma_R0; diff --git a/src/test/correct/basic_arrays_read/gcc_pic/basic_arrays_read.expected b/src/test/correct/basic_arrays_read/gcc_pic/basic_arrays_read.expected index 67045570e..99eaf7326 100644 --- a/src/test/correct/basic_arrays_read/gcc_pic/basic_arrays_read.expected +++ b/src/test/correct/basic_arrays_read/gcc_pic/basic_arrays_read.expected @@ -4,7 +4,7 @@ var {:extern} R0: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $arr_addr: bv64; axiom 
($arr_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if ((index == bvadd64($arr_addr, 4bv64)) || (index == bvadd64($arr_addr, 0bv64))) then false else false) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -42,15 +38,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(memory_load32_le(mem, bvadd64($arr_addr, 0bv64))) == memory_load32_le(mem, bvadd64($arr_addr, 0bv64))); - free ensures (memory_load8_le(mem, 1924bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1925bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1926bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1927bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1924bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69608bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69616bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -81,24 +74,18 @@ procedure main(); modifies Gamma_R0, Gamma_mem, R0, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1924bv64) == 1bv8); - free requires (memory_load8_le(mem, 1925bv64) == 0bv8); - free requires (memory_load8_le(mem, 1926bv64) == 2bv8); - free requires (memory_load8_le(mem, 1927bv64) == 0bv8); + free requires (memory_load32_le(mem, 1924bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69608bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free requires (memory_load64_le(mem, 69616bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); - free ensures (memory_load8_le(mem, 1924bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1925bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1926bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1927bv64) == 0bv8); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); + free ensures (memory_load32_le(mem, 1924bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69016bv64) == 
1792bv64); free ensures (memory_load64_le(mem, 69608bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69616bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { @@ -110,7 +97,6 @@ implementation main() call rely(); assert (L(mem, R0) ==> true); mem, Gamma_mem := memory_store32_le(mem, R0, 0bv32), gamma_store32(Gamma_mem, R0, true); - assert true; assume {:captureState "%000002d7"} true; R0, Gamma_R0 := 65536bv64, true; call rely(); diff --git a/src/test/correct/basic_arrays_read/gcc_pic/basic_arrays_read_gtirb.expected b/src/test/correct/basic_arrays_read/gcc_pic/basic_arrays_read_gtirb.expected index 1cb1a7c77..9015de36a 100644 --- a/src/test/correct/basic_arrays_read/gcc_pic/basic_arrays_read_gtirb.expected +++ b/src/test/correct/basic_arrays_read/gcc_pic/basic_arrays_read_gtirb.expected @@ -4,7 +4,7 @@ var {:extern} R0: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $arr_addr: bv64; axiom ($arr_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if ((index == bvadd64($arr_addr, 4bv64)) || (index == bvadd64($arr_addr, 0bv64))) then false else false) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -42,15 +38,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(memory_load32_le(mem, bvadd64($arr_addr, 0bv64))) == memory_load32_le(mem, bvadd64($arr_addr, 0bv64))); - free ensures (memory_load8_le(mem, 1924bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1925bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1926bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1927bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1924bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69608bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69616bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -81,24 +74,18 @@ procedure main(); modifies Gamma_R0, Gamma_mem, R0, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1924bv64) == 
1bv8); - free requires (memory_load8_le(mem, 1925bv64) == 0bv8); - free requires (memory_load8_le(mem, 1926bv64) == 2bv8); - free requires (memory_load8_le(mem, 1927bv64) == 0bv8); + free requires (memory_load32_le(mem, 1924bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69608bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free requires (memory_load64_le(mem, 69616bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); - free ensures (memory_load8_le(mem, 1924bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1925bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1926bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1927bv64) == 0bv8); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); + free ensures (memory_load32_le(mem, 1924bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69608bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69616bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { @@ -110,7 +97,6 @@ implementation main() call rely(); assert (L(mem, R0) ==> true); mem, Gamma_mem := memory_store32_le(mem, R0, 0bv32), gamma_store32(Gamma_mem, R0, true); - assert true; assume {:captureState "1884$0"} true; R0, Gamma_R0 := 65536bv64, true; call rely(); diff --git a/src/test/correct/basic_arrays_write/clang/basic_arrays_write.expected b/src/test/correct/basic_arrays_write/clang/basic_arrays_write.expected index 61947ed82..02898a966 100644 --- a/src/test/correct/basic_arrays_write/clang/basic_arrays_write.expected +++ b/src/test/correct/basic_arrays_write/clang/basic_arrays_write.expected @@ -12,7 +12,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $arr_addr: bv64; axiom ($arr_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if ((index == bvadd64($arr_addr, 4bv64)) || (index == bvadd64($arr_addr, 0bv64))) then false else false) } @@ -33,10 +33,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -46,10 +42,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures true; - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures 
(memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -85,20 +78,14 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1868bv64) == 1bv8); - free requires (memory_load8_le(mem, 1869bv64) == 0bv8); - free requires (memory_load8_le(mem, 1870bv64) == 2bv8); - free requires (memory_load8_le(mem, 1871bv64) == 0bv8); + free requires (memory_load32_le(mem, 1868bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_arrays_write/clang/basic_arrays_write_gtirb.expected b/src/test/correct/basic_arrays_write/clang/basic_arrays_write_gtirb.expected index e8ee268ce..c0e68cb5f 100644 --- a/src/test/correct/basic_arrays_write/clang/basic_arrays_write_gtirb.expected +++ b/src/test/correct/basic_arrays_write/clang/basic_arrays_write_gtirb.expected @@ -12,7 +12,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $arr_addr: bv64; axiom ($arr_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if ((index == bvadd64($arr_addr, 4bv64)) || (index == bvadd64($arr_addr, 0bv64))) then false else false) } @@ -33,10 +33,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -46,10 +42,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures true; - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures 
(memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -85,20 +78,14 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1868bv64) == 1bv8); - free requires (memory_load8_le(mem, 1869bv64) == 0bv8); - free requires (memory_load8_le(mem, 1870bv64) == 2bv8); - free requires (memory_load8_le(mem, 1871bv64) == 0bv8); + free requires (memory_load32_le(mem, 1868bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_arrays_write/clang_O2/basic_arrays_write.expected b/src/test/correct/basic_arrays_write/clang_O2/basic_arrays_write.expected index a86300943..cd666691a 100644 --- a/src/test/correct/basic_arrays_write/clang_O2/basic_arrays_write.expected +++ b/src/test/correct/basic_arrays_write/clang_O2/basic_arrays_write.expected @@ -8,7 +8,7 @@ var {:extern} R9: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $arr_addr: bv64; axiom ($arr_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if ((index == bvadd64($arr_addr, 4bv64)) || (index == bvadd64($arr_addr, 0bv64))) then false else false) } @@ -25,10 +25,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -38,10 +34,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures true; - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 
1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -77,18 +70,12 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1852bv64) == 1bv8); - free requires (memory_load8_le(mem, 1853bv64) == 0bv8); - free requires (memory_load8_le(mem, 1854bv64) == 2bv8); - free requires (memory_load8_le(mem, 1855bv64) == 0bv8); + free requires (memory_load32_le(mem, 1852bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_arrays_write/clang_O2/basic_arrays_write_gtirb.expected b/src/test/correct/basic_arrays_write/clang_O2/basic_arrays_write_gtirb.expected index 4e59a1e8e..7b61c3082 100644 --- a/src/test/correct/basic_arrays_write/clang_O2/basic_arrays_write_gtirb.expected +++ b/src/test/correct/basic_arrays_write/clang_O2/basic_arrays_write_gtirb.expected @@ -8,7 +8,7 @@ var {:extern} R9: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $arr_addr: bv64; axiom ($arr_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if ((index == bvadd64($arr_addr, 4bv64)) || (index == bvadd64($arr_addr, 0bv64))) then false else false) } @@ -25,10 +25,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -38,10 +34,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures true; - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -77,18 +70,12 @@ procedure main(); requires (Gamma_R0 == false); free 
requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1852bv64) == 1bv8); - free requires (memory_load8_le(mem, 1853bv64) == 0bv8); - free requires (memory_load8_le(mem, 1854bv64) == 2bv8); - free requires (memory_load8_le(mem, 1855bv64) == 0bv8); + free requires (memory_load32_le(mem, 1852bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_arrays_write/clang_pic/basic_arrays_write.expected b/src/test/correct/basic_arrays_write/clang_pic/basic_arrays_write.expected index 228ad3272..a9469c047 100644 --- a/src/test/correct/basic_arrays_write/clang_pic/basic_arrays_write.expected +++ b/src/test/correct/basic_arrays_write/clang_pic/basic_arrays_write.expected @@ -12,7 +12,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $arr_addr: bv64; axiom ($arr_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if ((index == bvadd64($arr_addr, 4bv64)) || (index == bvadd64($arr_addr, 0bv64))) then false else false) } @@ -37,10 +37,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -50,15 +46,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures true; - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 1876bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} 
rely_transitive(); modifies Gamma_mem, mem; @@ -90,26 +83,20 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1932bv64) == 1bv8); - free requires (memory_load8_le(mem, 1933bv64) == 0bv8); - free requires (memory_load8_le(mem, 1934bv64) == 2bv8); - free requires (memory_load8_le(mem, 1935bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1932bv64) == 131073bv32); free requires (memory_load64_le(mem, 69056bv64) == 1872bv64); free requires (memory_load64_le(mem, 69064bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69584bv64) == 1876bv64); free requires (memory_load64_le(mem, 69592bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 1876bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basic_arrays_write/clang_pic/basic_arrays_write_gtirb.expected b/src/test/correct/basic_arrays_write/clang_pic/basic_arrays_write_gtirb.expected index d89eb58c2..b6eec67eb 100644 --- a/src/test/correct/basic_arrays_write/clang_pic/basic_arrays_write_gtirb.expected +++ b/src/test/correct/basic_arrays_write/clang_pic/basic_arrays_write_gtirb.expected @@ -12,7 +12,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $arr_addr: bv64; axiom ($arr_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if ((index == bvadd64($arr_addr, 4bv64)) || (index == bvadd64($arr_addr, 0bv64))) then false else false) } @@ -37,10 +37,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -50,15 +46,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures true; - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures 
(memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 1876bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -90,26 +83,20 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1932bv64) == 1bv8); - free requires (memory_load8_le(mem, 1933bv64) == 0bv8); - free requires (memory_load8_le(mem, 1934bv64) == 2bv8); - free requires (memory_load8_le(mem, 1935bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1932bv64) == 131073bv32); free requires (memory_load64_le(mem, 69056bv64) == 1872bv64); free requires (memory_load64_le(mem, 69064bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69584bv64) == 1876bv64); free requires (memory_load64_le(mem, 69592bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 1876bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basic_arrays_write/gcc/basic_arrays_write.expected b/src/test/correct/basic_arrays_write/gcc/basic_arrays_write.expected index 0dcd266ac..10630740e 100644 --- a/src/test/correct/basic_arrays_write/gcc/basic_arrays_write.expected +++ b/src/test/correct/basic_arrays_write/gcc/basic_arrays_write.expected @@ -10,7 +10,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $arr_addr: bv64; axiom ($arr_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if ((index == bvadd64($arr_addr, 4bv64)) || (index == bvadd64($arr_addr, 0bv64))) then false else false) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ 
memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -44,10 +40,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures true; - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -83,20 +76,14 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1868bv64) == 1bv8); - free requires (memory_load8_le(mem, 1869bv64) == 0bv8); - free requires (memory_load8_le(mem, 1870bv64) == 2bv8); - free requires (memory_load8_le(mem, 1871bv64) == 0bv8); + free requires (memory_load32_le(mem, 1868bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basic_arrays_write/gcc/basic_arrays_write_gtirb.expected b/src/test/correct/basic_arrays_write/gcc/basic_arrays_write_gtirb.expected index db3755058..f2284d404 100644 --- a/src/test/correct/basic_arrays_write/gcc/basic_arrays_write_gtirb.expected +++ b/src/test/correct/basic_arrays_write/gcc/basic_arrays_write_gtirb.expected @@ -10,7 +10,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $arr_addr: bv64; axiom ($arr_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if ((index == bvadd64($arr_addr, 4bv64)) || (index == bvadd64($arr_addr, 0bv64))) then false else false) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function 
{:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -44,10 +40,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures true; - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -83,20 +76,14 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1868bv64) == 1bv8); - free requires (memory_load8_le(mem, 1869bv64) == 0bv8); - free requires (memory_load8_le(mem, 1870bv64) == 2bv8); - free requires (memory_load8_le(mem, 1871bv64) == 0bv8); + free requires (memory_load32_le(mem, 1868bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basic_arrays_write/gcc_O2/basic_arrays_write.expected b/src/test/correct/basic_arrays_write/gcc_O2/basic_arrays_write.expected index 8f533dbc6..846543f7b 100644 --- a/src/test/correct/basic_arrays_write/gcc_O2/basic_arrays_write.expected +++ b/src/test/correct/basic_arrays_write/gcc_O2/basic_arrays_write.expected @@ -8,7 +8,7 @@ var {:extern} R2: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $arr_addr: bv64; axiom ($arr_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if ((index == bvadd64($arr_addr, 4bv64)) || (index == bvadd64($arr_addr, 0bv64))) then false else false) } @@ -25,10 +25,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := 
value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -38,10 +34,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures true; - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -77,18 +70,12 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basic_arrays_write/gcc_O2/basic_arrays_write_gtirb.expected b/src/test/correct/basic_arrays_write/gcc_O2/basic_arrays_write_gtirb.expected index b212ef9a6..0fe8d5e9c 100644 --- a/src/test/correct/basic_arrays_write/gcc_O2/basic_arrays_write_gtirb.expected +++ b/src/test/correct/basic_arrays_write/gcc_O2/basic_arrays_write_gtirb.expected @@ -8,7 +8,7 @@ var {:extern} R2: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $arr_addr: bv64; axiom ($arr_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if ((index == bvadd64($arr_addr, 4bv64)) || (index == bvadd64($arr_addr, 0bv64))) then false else false) } @@ -25,10 +25,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -38,10 +34,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == 
old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures true; - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -77,18 +70,12 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basic_arrays_write/gcc_pic/basic_arrays_write.expected b/src/test/correct/basic_arrays_write/gcc_pic/basic_arrays_write.expected index 40aa5a8db..244532440 100644 --- a/src/test/correct/basic_arrays_write/gcc_pic/basic_arrays_write.expected +++ b/src/test/correct/basic_arrays_write/gcc_pic/basic_arrays_write.expected @@ -10,7 +10,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $arr_addr: bv64; axiom ($arr_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if ((index == bvadd64($arr_addr, 4bv64)) || (index == bvadd64($arr_addr, 0bv64))) then false else false) } @@ -35,10 +35,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -48,15 +44,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures true; - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 
1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69608bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69616bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -88,26 +81,20 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1932bv64) == 1bv8); - free requires (memory_load8_le(mem, 1933bv64) == 0bv8); - free requires (memory_load8_le(mem, 1934bv64) == 2bv8); - free requires (memory_load8_le(mem, 1935bv64) == 0bv8); + free requires (memory_load32_le(mem, 1932bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69608bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free requires (memory_load64_le(mem, 69616bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69608bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69616bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/basic_arrays_write/gcc_pic/basic_arrays_write_gtirb.expected b/src/test/correct/basic_arrays_write/gcc_pic/basic_arrays_write_gtirb.expected index 76d4e4682..bffaf2ca4 100644 --- a/src/test/correct/basic_arrays_write/gcc_pic/basic_arrays_write_gtirb.expected +++ b/src/test/correct/basic_arrays_write/gcc_pic/basic_arrays_write_gtirb.expected @@ -10,7 +10,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $arr_addr: bv64; axiom ($arr_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if ((index == bvadd64($arr_addr, 4bv64)) || (index == bvadd64($arr_addr, 0bv64))) then false else false) } @@ -35,10 +35,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ 
(memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -48,15 +44,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures true; - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69608bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69616bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -88,26 +81,20 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1932bv64) == 1bv8); - free requires (memory_load8_le(mem, 1933bv64) == 0bv8); - free requires (memory_load8_le(mem, 1934bv64) == 2bv8); - free requires (memory_load8_le(mem, 1935bv64) == 0bv8); + free requires (memory_load32_le(mem, 1932bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69608bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free requires (memory_load64_le(mem, 69616bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69608bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69616bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git 
a/src/test/correct/basic_assign_assign/clang/basic_assign_assign.expected b/src/test/correct/basic_assign_assign/clang/basic_assign_assign.expected index ebed184a4..4eac10daa 100644 --- a/src/test/correct/basic_assign_assign/clang/basic_assign_assign.expected +++ b/src/test/correct/basic_assign_assign/clang/basic_assign_assign.expected @@ -8,7 +8,7 @@ var {:extern} R9: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -25,10 +25,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -37,10 +33,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (memory_load32_le(mem, $x_addr) == 1bv32)) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -76,19 +69,13 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1852bv64) == 1bv8); - free requires (memory_load8_le(mem, 1853bv64) == 0bv8); - free requires (memory_load8_le(mem, 1854bv64) == 2bv8); - free requires (memory_load8_le(mem, 1855bv64) == 0bv8); + free requires (memory_load32_le(mem, 1852bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures ((memory_load32_le(mem, $x_addr) == 5bv32) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git 
a/src/test/correct/basic_assign_assign/clang/basic_assign_assign_gtirb.expected b/src/test/correct/basic_assign_assign/clang/basic_assign_assign_gtirb.expected index ea5223b80..4b43006e8 100644 --- a/src/test/correct/basic_assign_assign/clang/basic_assign_assign_gtirb.expected +++ b/src/test/correct/basic_assign_assign/clang/basic_assign_assign_gtirb.expected @@ -8,7 +8,7 @@ var {:extern} R9: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -25,10 +25,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -37,10 +33,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (memory_load32_le(mem, $x_addr) == 1bv32)) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -76,19 +69,13 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1852bv64) == 1bv8); - free requires (memory_load8_le(mem, 1853bv64) == 0bv8); - free requires (memory_load8_le(mem, 1854bv64) == 2bv8); - free requires (memory_load8_le(mem, 1855bv64) == 0bv8); + free requires (memory_load32_le(mem, 1852bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures ((memory_load32_le(mem, $x_addr) == 5bv32) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff 
--git a/src/test/correct/basic_assign_assign/clang_pic/basic_assign_assign.expected b/src/test/correct/basic_assign_assign/clang_pic/basic_assign_assign.expected index e25b839d2..254255f97 100644 --- a/src/test/correct/basic_assign_assign/clang_pic/basic_assign_assign.expected +++ b/src/test/correct/basic_assign_assign/clang_pic/basic_assign_assign.expected @@ -8,7 +8,7 @@ var {:extern} R9: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -41,15 +37,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (memory_load32_le(mem, $x_addr) == 1bv32)) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -81,25 +74,19 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); free requires (memory_load64_le(mem, 69056bv64) == 1872bv64); free requires (memory_load64_le(mem, 69064bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures ((memory_load32_le(mem, $x_addr) == 5bv32) || 
(memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basic_assign_assign/clang_pic/basic_assign_assign_gtirb.expected b/src/test/correct/basic_assign_assign/clang_pic/basic_assign_assign_gtirb.expected index 014cfafa6..e43de6329 100644 --- a/src/test/correct/basic_assign_assign/clang_pic/basic_assign_assign_gtirb.expected +++ b/src/test/correct/basic_assign_assign/clang_pic/basic_assign_assign_gtirb.expected @@ -8,7 +8,7 @@ var {:extern} R9: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -41,15 +37,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (memory_load32_le(mem, $x_addr) == 1bv32)) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -81,25 +74,19 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 
69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); free requires (memory_load64_le(mem, 69056bv64) == 1872bv64); free requires (memory_load64_le(mem, 69064bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures ((memory_load32_le(mem, $x_addr) == 5bv32) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basic_assign_assign/gcc/basic_assign_assign.expected b/src/test/correct/basic_assign_assign/gcc/basic_assign_assign.expected index 38c1cfa92..2bc7acf80 100644 --- a/src/test/correct/basic_assign_assign/gcc/basic_assign_assign.expected +++ b/src/test/correct/basic_assign_assign/gcc/basic_assign_assign.expected @@ -6,7 +6,7 @@ var {:extern} R1: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -23,10 +23,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -35,10 +31,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (memory_load32_le(mem, $x_addr) == 1bv32)) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 
1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -74,19 +67,13 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures ((memory_load32_le(mem, $x_addr) == 5bv32) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basic_assign_assign/gcc/basic_assign_assign_gtirb.expected b/src/test/correct/basic_assign_assign/gcc/basic_assign_assign_gtirb.expected index 1d976c8ea..163ad1ce6 100644 --- a/src/test/correct/basic_assign_assign/gcc/basic_assign_assign_gtirb.expected +++ b/src/test/correct/basic_assign_assign/gcc/basic_assign_assign_gtirb.expected @@ -6,7 +6,7 @@ var {:extern} R1: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -23,10 +23,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -35,10 +31,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (memory_load32_le(mem, $x_addr) == 1bv32)) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 
1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -74,19 +67,13 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures ((memory_load32_le(mem, $x_addr) == 5bv32) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basic_assign_assign/gcc_O2/basic_assign_assign.expected b/src/test/correct/basic_assign_assign/gcc_O2/basic_assign_assign.expected index 374ce9ba2..962054198 100644 --- a/src/test/correct/basic_assign_assign/gcc_O2/basic_assign_assign.expected +++ b/src/test/correct/basic_assign_assign/gcc_O2/basic_assign_assign.expected @@ -8,7 +8,7 @@ var {:extern} R2: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -25,10 +25,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -37,10 +33,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (memory_load32_le(mem, $x_addr) == 1bv32)) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 
0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -76,19 +69,13 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures ((memory_load32_le(mem, $x_addr) == 5bv32) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basic_assign_assign/gcc_O2/basic_assign_assign_gtirb.expected b/src/test/correct/basic_assign_assign/gcc_O2/basic_assign_assign_gtirb.expected index b19a1ad94..b581f7d08 100644 --- a/src/test/correct/basic_assign_assign/gcc_O2/basic_assign_assign_gtirb.expected +++ b/src/test/correct/basic_assign_assign/gcc_O2/basic_assign_assign_gtirb.expected @@ -8,7 +8,7 @@ var {:extern} R2: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -25,10 +25,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -37,10 +33,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (memory_load32_le(mem, $x_addr) == 1bv32)) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 
1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -76,19 +69,13 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures ((memory_load32_le(mem, $x_addr) == 5bv32) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basic_assign_assign/gcc_pic/basic_assign_assign.expected b/src/test/correct/basic_assign_assign/gcc_pic/basic_assign_assign.expected index ef3087e5a..07cde5e52 100644 --- a/src/test/correct/basic_assign_assign/gcc_pic/basic_assign_assign.expected +++ b/src/test/correct/basic_assign_assign/gcc_pic/basic_assign_assign.expected @@ -6,7 +6,7 @@ var {:extern} R1: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -27,10 +27,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -39,15 +35,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (memory_load32_le(mem, $x_addr) == 1bv32)) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) 
== 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -79,25 +72,19 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures ((memory_load32_le(mem, $x_addr) == 5bv32) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/basic_assign_assign/gcc_pic/basic_assign_assign_gtirb.expected b/src/test/correct/basic_assign_assign/gcc_pic/basic_assign_assign_gtirb.expected index 97ff0a4b6..57456a981 100644 --- a/src/test/correct/basic_assign_assign/gcc_pic/basic_assign_assign_gtirb.expected +++ b/src/test/correct/basic_assign_assign/gcc_pic/basic_assign_assign_gtirb.expected @@ -6,7 +6,7 @@ var {:extern} R1: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -27,10 +27,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ 
(memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -39,15 +35,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (memory_load32_le(mem, $x_addr) == 1bv32)) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -79,25 +72,19 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures ((memory_load32_le(mem, $x_addr) == 5bv32) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 
69640bv64); implementation main() { diff --git a/src/test/correct/basic_assign_increment/clang/basic_assign_increment.expected b/src/test/correct/basic_assign_increment/clang/basic_assign_increment.expected index a2e5ac8d9..812b609e6 100644 --- a/src/test/correct/basic_assign_increment/clang/basic_assign_increment.expected +++ b/src/test/correct/basic_assign_increment/clang/basic_assign_increment.expected @@ -8,7 +8,7 @@ var {:extern} R9: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -30,10 +30,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,10 +39,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (memory_load32_le(mem, $x_addr) == 5bv32)); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -82,19 +75,13 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (((memory_load32_le(mem, $x_addr) == 1bv32) || (memory_load32_le(mem, $x_addr) == 5bv32)) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures 
(memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_assign_increment/clang/basic_assign_increment_gtirb.expected b/src/test/correct/basic_assign_increment/clang/basic_assign_increment_gtirb.expected index 4b8981424..ef78d2241 100644 --- a/src/test/correct/basic_assign_increment/clang/basic_assign_increment_gtirb.expected +++ b/src/test/correct/basic_assign_increment/clang/basic_assign_increment_gtirb.expected @@ -8,7 +8,7 @@ var {:extern} R9: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -30,10 +30,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,10 +39,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (memory_load32_le(mem, $x_addr) == 5bv32)); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -82,19 +75,13 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (((memory_load32_le(mem, $x_addr) == 1bv32) || (memory_load32_le(mem, $x_addr) == 5bv32)) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 
69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_assign_increment/clang_pic/basic_assign_increment.expected b/src/test/correct/basic_assign_increment/clang_pic/basic_assign_increment.expected index ae5a4a00a..e4e53c315 100644 --- a/src/test/correct/basic_assign_increment/clang_pic/basic_assign_increment.expected +++ b/src/test/correct/basic_assign_increment/clang_pic/basic_assign_increment.expected @@ -8,7 +8,7 @@ var {:extern} R9: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -34,10 +34,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -47,15 +43,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (memory_load32_le(mem, $x_addr) == 5bv32)); - free ensures (memory_load8_le(mem, 1924bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1925bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1926bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1927bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1924bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -87,25 +80,19 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1924bv64) == 1bv8); - free requires (memory_load8_le(mem, 1925bv64) == 0bv8); - free requires (memory_load8_le(mem, 1926bv64) == 2bv8); - free requires (memory_load8_le(mem, 1927bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1924bv64) == 131073bv32); free requires (memory_load64_le(mem, 69056bv64) == 1872bv64); free requires (memory_load64_le(mem, 69064bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69672bv64) == 
69672bv64); ensures (((memory_load32_le(mem, $x_addr) == 1bv32) || (memory_load32_le(mem, $x_addr) == 5bv32)) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1924bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1925bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1926bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1927bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1924bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basic_assign_increment/clang_pic/basic_assign_increment_gtirb.expected b/src/test/correct/basic_assign_increment/clang_pic/basic_assign_increment_gtirb.expected index af0c7855a..330e91d98 100644 --- a/src/test/correct/basic_assign_increment/clang_pic/basic_assign_increment_gtirb.expected +++ b/src/test/correct/basic_assign_increment/clang_pic/basic_assign_increment_gtirb.expected @@ -8,7 +8,7 @@ var {:extern} R9: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -34,10 +34,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -47,15 +43,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (memory_load32_le(mem, $x_addr) == 5bv32)); - free ensures (memory_load8_le(mem, 1924bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1925bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1926bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1927bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1924bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -87,25 +80,19 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free 
requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1924bv64) == 1bv8); - free requires (memory_load8_le(mem, 1925bv64) == 0bv8); - free requires (memory_load8_le(mem, 1926bv64) == 2bv8); - free requires (memory_load8_le(mem, 1927bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1924bv64) == 131073bv32); free requires (memory_load64_le(mem, 69056bv64) == 1872bv64); free requires (memory_load64_le(mem, 69064bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (((memory_load32_le(mem, $x_addr) == 1bv32) || (memory_load32_le(mem, $x_addr) == 5bv32)) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1924bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1925bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1926bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1927bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1924bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basic_assign_increment/gcc/basic_assign_increment.expected b/src/test/correct/basic_assign_increment/gcc/basic_assign_increment.expected index 1b156c659..78dc01415 100644 --- a/src/test/correct/basic_assign_increment/gcc/basic_assign_increment.expected +++ b/src/test/correct/basic_assign_increment/gcc/basic_assign_increment.expected @@ -6,7 +6,7 @@ var {:extern} R1: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -28,10 +28,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -41,10 +37,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (memory_load32_le(mem, $x_addr) == 5bv32)); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 
1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -80,19 +73,13 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1868bv64) == 1bv8); - free requires (memory_load8_le(mem, 1869bv64) == 0bv8); - free requires (memory_load8_le(mem, 1870bv64) == 2bv8); - free requires (memory_load8_le(mem, 1871bv64) == 0bv8); + free requires (memory_load32_le(mem, 1868bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (((memory_load32_le(mem, $x_addr) == 1bv32) || (memory_load32_le(mem, $x_addr) == 5bv32)) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basic_assign_increment/gcc/basic_assign_increment_gtirb.expected b/src/test/correct/basic_assign_increment/gcc/basic_assign_increment_gtirb.expected index 3130f1279..ca01cce2f 100644 --- a/src/test/correct/basic_assign_increment/gcc/basic_assign_increment_gtirb.expected +++ b/src/test/correct/basic_assign_increment/gcc/basic_assign_increment_gtirb.expected @@ -6,7 +6,7 @@ var {:extern} R1: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -28,10 +28,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -41,10 +37,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (memory_load32_le(mem, $x_addr) == 5bv32)); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures 
(memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -80,19 +73,13 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1868bv64) == 1bv8); - free requires (memory_load8_le(mem, 1869bv64) == 0bv8); - free requires (memory_load8_le(mem, 1870bv64) == 2bv8); - free requires (memory_load8_le(mem, 1871bv64) == 0bv8); + free requires (memory_load32_le(mem, 1868bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (((memory_load32_le(mem, $x_addr) == 1bv32) || (memory_load32_le(mem, $x_addr) == 5bv32)) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basic_assign_increment/gcc_O2/basic_assign_increment.expected b/src/test/correct/basic_assign_increment/gcc_O2/basic_assign_increment.expected index 695b9a5d7..960b020dc 100644 --- a/src/test/correct/basic_assign_increment/gcc_O2/basic_assign_increment.expected +++ b/src/test/correct/basic_assign_increment/gcc_O2/basic_assign_increment.expected @@ -8,7 +8,7 @@ var {:extern} R2: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -30,10 +30,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,10 +39,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (memory_load32_le(mem, $x_addr) == 5bv32)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free 
ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -82,19 +75,13 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (((memory_load32_le(mem, $x_addr) == 1bv32) || (memory_load32_le(mem, $x_addr) == 5bv32)) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basic_assign_increment/gcc_O2/basic_assign_increment_gtirb.expected b/src/test/correct/basic_assign_increment/gcc_O2/basic_assign_increment_gtirb.expected index fbd0b4264..04b71a74b 100644 --- a/src/test/correct/basic_assign_increment/gcc_O2/basic_assign_increment_gtirb.expected +++ b/src/test/correct/basic_assign_increment/gcc_O2/basic_assign_increment_gtirb.expected @@ -8,7 +8,7 @@ var {:extern} R2: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -30,10 +30,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,10 +39,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (memory_load32_le(mem, $x_addr) == 5bv32)); - free ensures 
(memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -82,19 +75,13 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (((memory_load32_le(mem, $x_addr) == 1bv32) || (memory_load32_le(mem, $x_addr) == 5bv32)) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basic_assign_increment/gcc_pic/basic_assign_increment.expected b/src/test/correct/basic_assign_increment/gcc_pic/basic_assign_increment.expected index 284230f1f..7fa9d9aee 100644 --- a/src/test/correct/basic_assign_increment/gcc_pic/basic_assign_increment.expected +++ b/src/test/correct/basic_assign_increment/gcc_pic/basic_assign_increment.expected @@ -6,7 +6,7 @@ var {:extern} R1: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -32,10 +32,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -45,15 +41,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (memory_load32_le(mem, $x_addr) == 5bv32)); - 
free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -85,25 +78,19 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1932bv64) == 1bv8); - free requires (memory_load8_le(mem, 1933bv64) == 0bv8); - free requires (memory_load8_le(mem, 1934bv64) == 2bv8); - free requires (memory_load8_le(mem, 1935bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1932bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (((memory_load32_le(mem, $x_addr) == 1bv32) || (memory_load32_le(mem, $x_addr) == 5bv32)) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/basic_assign_increment/gcc_pic/basic_assign_increment_gtirb.expected b/src/test/correct/basic_assign_increment/gcc_pic/basic_assign_increment_gtirb.expected index eec10c1a0..b429d97d2 100644 --- a/src/test/correct/basic_assign_increment/gcc_pic/basic_assign_increment_gtirb.expected +++ b/src/test/correct/basic_assign_increment/gcc_pic/basic_assign_increment_gtirb.expected @@ -6,7 +6,7 @@ var {:extern} R1: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true 
else false) } @@ -32,10 +32,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -45,15 +41,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (memory_load32_le(mem, $x_addr) == 5bv32)); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -85,25 +78,19 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1932bv64) == 1bv8); - free requires (memory_load8_le(mem, 1933bv64) == 0bv8); - free requires (memory_load8_le(mem, 1934bv64) == 2bv8); - free requires (memory_load8_le(mem, 1935bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1932bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (((memory_load32_le(mem, $x_addr) == 1bv32) || (memory_load32_le(mem, $x_addr) == 5bv32)) || (memory_load32_le(mem, $x_addr) == 6bv32)); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures 
(memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/basic_function_call_caller/clang/basic_function_call_caller.expected b/src/test/correct/basic_function_call_caller/clang/basic_function_call_caller.expected index f38be33df..cb29dbcb0 100644 --- a/src/test/correct/basic_function_call_caller/clang/basic_function_call_caller.expected +++ b/src/test/correct/basic_function_call_caller/clang/basic_function_call_caller.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -47,10 +47,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -65,10 +61,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1820bv64); @@ -107,10 +100,7 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1820bv64); @@ -119,10 +109,7 @@ procedure main(); 
free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1820bv64); @@ -186,19 +173,13 @@ implementation main() procedure zero(); modifies Gamma_R0, R0; - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1820bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures ((R0[32:0] == 0bv32) && Gamma_R0); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1820bv64); diff --git a/src/test/correct/basic_function_call_caller/clang/basic_function_call_caller_gtirb.expected b/src/test/correct/basic_function_call_caller/clang/basic_function_call_caller_gtirb.expected index 302d78df0..d9bbf3d49 100644 --- a/src/test/correct/basic_function_call_caller/clang/basic_function_call_caller_gtirb.expected +++ b/src/test/correct/basic_function_call_caller/clang/basic_function_call_caller_gtirb.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -47,10 +47,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -65,10 +61,7 @@ procedure {:extern} rely(); ensures (forall i: 
bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1820bv64); @@ -107,10 +100,7 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1820bv64); @@ -119,10 +109,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1820bv64); @@ -186,19 +173,13 @@ implementation main() procedure zero(); modifies Gamma_R0, R0; - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1820bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures ((R0[32:0] == 0bv32) && Gamma_R0); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1820bv64); diff --git a/src/test/correct/basic_function_call_caller/clang_O2/basic_function_call_caller.expected b/src/test/correct/basic_function_call_caller/clang_O2/basic_function_call_caller.expected index 959449774..21551088c 100644 --- a/src/test/correct/basic_function_call_caller/clang_O2/basic_function_call_caller.expected +++ 
b/src/test/correct/basic_function_call_caller/clang_O2/basic_function_call_caller.expected @@ -12,8 +12,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -33,10 +33,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -47,10 +43,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1820bv64); @@ -89,18 +82,12 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1868bv64) == 1bv8); - free requires (memory_load8_le(mem, 1869bv64) == 0bv8); - free requires (memory_load8_le(mem, 1870bv64) == 2bv8); - free requires (memory_load8_le(mem, 1871bv64) == 0bv8); + free requires (memory_load32_le(mem, 1868bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1820bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1820bv64); diff --git a/src/test/correct/basic_function_call_caller/clang_O2/basic_function_call_caller_gtirb.expected 
b/src/test/correct/basic_function_call_caller/clang_O2/basic_function_call_caller_gtirb.expected index 7368de672..60c056c27 100644 --- a/src/test/correct/basic_function_call_caller/clang_O2/basic_function_call_caller_gtirb.expected +++ b/src/test/correct/basic_function_call_caller/clang_O2/basic_function_call_caller_gtirb.expected @@ -12,8 +12,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -33,10 +33,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -47,10 +43,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1820bv64); @@ -89,18 +82,12 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1868bv64) == 1bv8); - free requires (memory_load8_le(mem, 1869bv64) == 0bv8); - free requires (memory_load8_le(mem, 1870bv64) == 2bv8); - free requires (memory_load8_le(mem, 1871bv64) == 0bv8); + free requires (memory_load32_le(mem, 1868bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1820bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures 
(memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1820bv64); diff --git a/src/test/correct/basic_function_call_caller/clang_pic/basic_function_call_caller.expected b/src/test/correct/basic_function_call_caller/clang_pic/basic_function_call_caller.expected index dc1090bb5..826223e20 100644 --- a/src/test/correct/basic_function_call_caller/clang_pic/basic_function_call_caller.expected +++ b/src/test/correct/basic_function_call_caller/clang_pic/basic_function_call_caller.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -47,10 +47,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -65,16 +61,13 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1968bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1969bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1970bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1971bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); + free ensures (memory_load32_le(mem, 1968bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1884bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -109,30 +102,24 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1968bv64) == 1bv8); - free requires (memory_load8_le(mem, 1969bv64) == 0bv8); - free requires (memory_load8_le(mem, 1970bv64) == 2bv8); - free requires (memory_load8_le(mem, 1971bv64) == 0bv8); - free requires (memory_load64_le(mem, 
69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); + free requires (memory_load32_le(mem, 1968bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1884bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R29 == old(Gamma_R29)); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1968bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1969bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1970bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1971bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); + free ensures (memory_load32_le(mem, 1968bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1884bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { @@ -196,27 +183,21 @@ implementation main() procedure zero(); modifies Gamma_R0, R0; - free requires (memory_load8_le(mem, 1968bv64) == 1bv8); - free requires (memory_load8_le(mem, 1969bv64) == 0bv8); - free requires (memory_load8_le(mem, 1970bv64) == 2bv8); - free requires (memory_load8_le(mem, 1971bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); + free requires (memory_load32_le(mem, 1968bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1884bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures ((R0[32:0] == 0bv32) && Gamma_R0); - free ensures (memory_load8_le(mem, 1968bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1969bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1970bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1971bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); + free ensures (memory_load32_le(mem, 1968bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1884bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation zero() { diff --git 
a/src/test/correct/basic_function_call_caller/clang_pic/basic_function_call_caller_gtirb.expected b/src/test/correct/basic_function_call_caller/clang_pic/basic_function_call_caller_gtirb.expected index fe88b304a..8d53aaa0e 100644 --- a/src/test/correct/basic_function_call_caller/clang_pic/basic_function_call_caller_gtirb.expected +++ b/src/test/correct/basic_function_call_caller/clang_pic/basic_function_call_caller_gtirb.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -47,10 +47,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -65,16 +61,13 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1968bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1969bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1970bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1971bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); + free ensures (memory_load32_le(mem, 1968bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1884bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -106,27 +99,21 @@ implementation {:extern} guarantee_reflexive() procedure zero(); modifies Gamma_R0, R0; - free requires (memory_load8_le(mem, 1968bv64) == 1bv8); - free requires (memory_load8_le(mem, 1969bv64) == 0bv8); - free requires (memory_load8_le(mem, 1970bv64) == 2bv8); - free requires (memory_load8_le(mem, 1971bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); + free requires (memory_load32_le(mem, 1968bv64) == 131073bv32); free requires 
(memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1884bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures ((R0[32:0] == 0bv32) && Gamma_R0); - free ensures (memory_load8_le(mem, 1968bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1969bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1970bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1971bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); + free ensures (memory_load32_le(mem, 1968bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1884bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation zero() { @@ -144,30 +131,24 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1968bv64) == 1bv8); - free requires (memory_load8_le(mem, 1969bv64) == 0bv8); - free requires (memory_load8_le(mem, 1970bv64) == 2bv8); - free requires (memory_load8_le(mem, 1971bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); + free requires (memory_load32_le(mem, 1968bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1884bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R29 == old(Gamma_R29)); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1968bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1969bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1970bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1971bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); + free ensures (memory_load32_le(mem, 1968bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1884bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basic_function_call_caller/gcc/basic_function_call_caller.expected 
b/src/test/correct/basic_function_call_caller/gcc/basic_function_call_caller.expected index 4d6d47afc..617c5bbf6 100644 --- a/src/test/correct/basic_function_call_caller/gcc/basic_function_call_caller.expected +++ b/src/test/correct/basic_function_call_caller/gcc/basic_function_call_caller.expected @@ -16,8 +16,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -45,10 +45,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -63,10 +59,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1900bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1901bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1902bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1903bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1900bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1820bv64); @@ -105,10 +98,7 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1900bv64) == 1bv8); - free requires (memory_load8_le(mem, 1901bv64) == 0bv8); - free requires (memory_load8_le(mem, 1902bv64) == 2bv8); - free requires (memory_load8_le(mem, 1903bv64) == 0bv8); + free requires (memory_load32_le(mem, 1900bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1820bv64); @@ -117,10 +107,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1900bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1901bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1902bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1903bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1900bv64) == 131073bv32); free ensures 
(memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1820bv64); @@ -184,19 +171,13 @@ implementation main() procedure zero(); modifies Gamma_R0, R0; - free requires (memory_load8_le(mem, 1900bv64) == 1bv8); - free requires (memory_load8_le(mem, 1901bv64) == 0bv8); - free requires (memory_load8_le(mem, 1902bv64) == 2bv8); - free requires (memory_load8_le(mem, 1903bv64) == 0bv8); + free requires (memory_load32_le(mem, 1900bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1820bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures ((R0[32:0] == 0bv32) && Gamma_R0); - free ensures (memory_load8_le(mem, 1900bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1901bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1902bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1903bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1900bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1820bv64); diff --git a/src/test/correct/basic_function_call_caller/gcc/basic_function_call_caller_gtirb.expected b/src/test/correct/basic_function_call_caller/gcc/basic_function_call_caller_gtirb.expected index aff11e014..ce6869d8e 100644 --- a/src/test/correct/basic_function_call_caller/gcc/basic_function_call_caller_gtirb.expected +++ b/src/test/correct/basic_function_call_caller/gcc/basic_function_call_caller_gtirb.expected @@ -16,8 +16,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -45,10 +45,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -63,10 +59,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1900bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1901bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1902bv64) == 2bv8); - free 
ensures (memory_load8_le(mem, 1903bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1900bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1820bv64); @@ -105,10 +98,7 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1900bv64) == 1bv8); - free requires (memory_load8_le(mem, 1901bv64) == 0bv8); - free requires (memory_load8_le(mem, 1902bv64) == 2bv8); - free requires (memory_load8_le(mem, 1903bv64) == 0bv8); + free requires (memory_load32_le(mem, 1900bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1820bv64); @@ -117,10 +107,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1900bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1901bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1902bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1903bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1900bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1820bv64); @@ -184,19 +171,13 @@ implementation main() procedure zero(); modifies Gamma_R0, R0; - free requires (memory_load8_le(mem, 1900bv64) == 1bv8); - free requires (memory_load8_le(mem, 1901bv64) == 0bv8); - free requires (memory_load8_le(mem, 1902bv64) == 2bv8); - free requires (memory_load8_le(mem, 1903bv64) == 0bv8); + free requires (memory_load32_le(mem, 1900bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1820bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures ((R0[32:0] == 0bv32) && Gamma_R0); - free ensures (memory_load8_le(mem, 1900bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1901bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1902bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1903bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1900bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1820bv64); diff --git a/src/test/correct/basic_function_call_caller/gcc_O2/basic_function_call_caller.expected b/src/test/correct/basic_function_call_caller/gcc_O2/basic_function_call_caller.expected index d47bc54a9..19ed4fbb5 100644 --- a/src/test/correct/basic_function_call_caller/gcc_O2/basic_function_call_caller.expected +++ b/src/test/correct/basic_function_call_caller/gcc_O2/basic_function_call_caller.expected @@ -12,8 +12,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns 
(bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -33,10 +33,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -47,10 +43,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -89,18 +82,12 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1916bv64) == 1bv8); - free requires (memory_load8_le(mem, 1917bv64) == 0bv8); - free requires (memory_load8_le(mem, 1918bv64) == 2bv8); - free requires (memory_load8_le(mem, 1919bv64) == 0bv8); + free requires (memory_load32_le(mem, 1916bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basic_function_call_caller/gcc_O2/basic_function_call_caller_gtirb.expected b/src/test/correct/basic_function_call_caller/gcc_O2/basic_function_call_caller_gtirb.expected index d6a304240..e86cb65ba 100644 --- a/src/test/correct/basic_function_call_caller/gcc_O2/basic_function_call_caller_gtirb.expected +++ b/src/test/correct/basic_function_call_caller/gcc_O2/basic_function_call_caller_gtirb.expected @@ -12,8 +12,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) 
returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -33,10 +33,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -47,10 +43,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -89,18 +82,12 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1916bv64) == 1bv8); - free requires (memory_load8_le(mem, 1917bv64) == 0bv8); - free requires (memory_load8_le(mem, 1918bv64) == 2bv8); - free requires (memory_load8_le(mem, 1919bv64) == 0bv8); + free requires (memory_load32_le(mem, 1916bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basic_function_call_caller/gcc_pic/basic_function_call_caller.expected b/src/test/correct/basic_function_call_caller/gcc_pic/basic_function_call_caller.expected index 4928f00aa..14f153a77 100644 --- a/src/test/correct/basic_function_call_caller/gcc_pic/basic_function_call_caller.expected +++ b/src/test/correct/basic_function_call_caller/gcc_pic/basic_function_call_caller.expected @@ -16,8 +16,8 
@@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -45,10 +45,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -63,16 +59,13 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1884bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1884bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -107,30 +100,24 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1964bv64) == 1bv8); - free requires (memory_load8_le(mem, 1965bv64) == 0bv8); - free requires (memory_load8_le(mem, 1966bv64) == 2bv8); - free requires (memory_load8_le(mem, 1967bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1884bv64); + free requires (memory_load32_le(mem, 1964bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free 
requires (memory_load64_le(mem, 69608bv64) == 69656bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1884bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R29 == old(Gamma_R29)); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1884bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1884bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { @@ -192,27 +179,21 @@ implementation main() procedure zero(); modifies Gamma_R0, R0; - free requires (memory_load8_le(mem, 1964bv64) == 1bv8); - free requires (memory_load8_le(mem, 1965bv64) == 0bv8); - free requires (memory_load8_le(mem, 1966bv64) == 2bv8); - free requires (memory_load8_le(mem, 1967bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1884bv64); + free requires (memory_load32_le(mem, 1964bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1884bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures ((R0[32:0] == 0bv32) && Gamma_R0); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1884bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1884bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation zero() { diff --git a/src/test/correct/basic_function_call_caller/gcc_pic/basic_function_call_caller_gtirb.expected b/src/test/correct/basic_function_call_caller/gcc_pic/basic_function_call_caller_gtirb.expected index 81c0626c8..1bfcdb325 100644 --- 
a/src/test/correct/basic_function_call_caller/gcc_pic/basic_function_call_caller_gtirb.expected +++ b/src/test/correct/basic_function_call_caller/gcc_pic/basic_function_call_caller_gtirb.expected @@ -16,8 +16,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -45,10 +45,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -63,16 +59,13 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1884bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1884bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -104,27 +97,21 @@ implementation {:extern} guarantee_reflexive() procedure zero(); modifies Gamma_R0, R0; - free requires (memory_load8_le(mem, 1964bv64) == 1bv8); - free requires (memory_load8_le(mem, 1965bv64) == 0bv8); - free requires (memory_load8_le(mem, 1966bv64) == 2bv8); - free requires (memory_load8_le(mem, 1967bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1884bv64); + free requires (memory_load32_le(mem, 1964bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free 
requires (memory_load64_le(mem, 69592bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1884bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures ((R0[32:0] == 0bv32) && Gamma_R0); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1884bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1884bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation zero() { @@ -142,30 +129,24 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1964bv64) == 1bv8); - free requires (memory_load8_le(mem, 1965bv64) == 0bv8); - free requires (memory_load8_le(mem, 1966bv64) == 2bv8); - free requires (memory_load8_le(mem, 1967bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1884bv64); + free requires (memory_load32_le(mem, 1964bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1884bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R29 == old(Gamma_R29)); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1884bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1884bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git 
a/src/test/correct/basic_function_call_reader/clang/basic_function_call_reader.expected b/src/test/correct/basic_function_call_reader/clang/basic_function_call_reader.expected index fc411c73f..04f4f6581 100644 --- a/src/test/correct/basic_function_call_reader/clang/basic_function_call_reader.expected +++ b/src/test/correct/basic_function_call_reader/clang/basic_function_call_reader.expected @@ -20,8 +20,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -47,10 +47,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -63,10 +59,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == 0bv32) ==> (memory_load32_le(mem, $x_addr) == 0bv32)); ensures (old(gamma_load32(Gamma_mem, $y_addr)) ==> ((memory_load32_le(mem, $x_addr) == 0bv32) || gamma_load32(Gamma_mem, $y_addr))); - free ensures (memory_load8_le(mem, 1908bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1909bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1910bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1911bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1908bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -104,20 +97,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1908bv64) == 1bv8); - free requires (memory_load8_le(mem, 1909bv64) == 0bv8); - free requires (memory_load8_le(mem, 1910bv64) == 2bv8); - free requires (memory_load8_le(mem, 1911bv64) == 0bv8); + free requires (memory_load32_le(mem, 1908bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1908bv64) 
== 1bv8); - free ensures (memory_load8_le(mem, 1909bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1910bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1911bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1908bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_function_call_reader/clang/basic_function_call_reader_gtirb.expected b/src/test/correct/basic_function_call_reader/clang/basic_function_call_reader_gtirb.expected index 40693258f..ee71f1b77 100644 --- a/src/test/correct/basic_function_call_reader/clang/basic_function_call_reader_gtirb.expected +++ b/src/test/correct/basic_function_call_reader/clang/basic_function_call_reader_gtirb.expected @@ -20,8 +20,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -46,10 +46,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -61,10 +57,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == 0bv32) ==> (memory_load32_le(mem, $x_addr) == 0bv32)); ensures (old(gamma_load32(Gamma_mem, $y_addr)) ==> ((memory_load32_le(mem, $x_addr) == 0bv32) || gamma_load32(Gamma_mem, $y_addr))); - free ensures (memory_load8_le(mem, 1908bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1909bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1910bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1911bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1908bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -102,20 +95,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1908bv64) == 1bv8); - free requires (memory_load8_le(mem, 1909bv64) == 0bv8); - free requires (memory_load8_le(mem, 1910bv64) == 2bv8); - free requires 
(memory_load8_le(mem, 1911bv64) == 0bv8); + free requires (memory_load32_le(mem, 1908bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1908bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1909bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1910bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1911bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1908bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_function_call_reader/clang_O2/basic_function_call_reader.expected b/src/test/correct/basic_function_call_reader/clang_O2/basic_function_call_reader.expected index 3ff9f7584..4e8bfd232 100644 --- a/src/test/correct/basic_function_call_reader/clang_O2/basic_function_call_reader.expected +++ b/src/test/correct/basic_function_call_reader/clang_O2/basic_function_call_reader.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -41,10 +41,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} {:bvbuiltin "sign_extend 1"} sign_extend1_32(bv32) returns (bv33); function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (bv33); function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); @@ -53,10 +49,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == 0bv32) ==> (memory_load32_le(mem, $x_addr) == 0bv32)); ensures (old(gamma_load32(Gamma_mem, $y_addr)) ==> ((memory_load32_le(mem, $x_addr) == 0bv32) || gamma_load32(Gamma_mem, $y_addr))); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -94,18 +87,12 @@ procedure 
main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R8, Gamma_R9, Gamma_VF, Gamma_ZF, Gamma_mem, NF, R0, R8, R9, VF, ZF, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1860bv64) == 1bv8); - free requires (memory_load8_le(mem, 1861bv64) == 0bv8); - free requires (memory_load8_le(mem, 1862bv64) == 2bv8); - free requires (memory_load8_le(mem, 1863bv64) == 0bv8); + free requires (memory_load32_le(mem, 1860bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_function_call_reader/clang_O2/basic_function_call_reader_gtirb.expected b/src/test/correct/basic_function_call_reader/clang_O2/basic_function_call_reader_gtirb.expected index 3b8806447..e49a87798 100644 --- a/src/test/correct/basic_function_call_reader/clang_O2/basic_function_call_reader_gtirb.expected +++ b/src/test/correct/basic_function_call_reader/clang_O2/basic_function_call_reader_gtirb.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -40,10 +40,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (bv33); function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); @@ -51,10 +47,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == 0bv32) ==> (memory_load32_le(mem, $x_addr) == 0bv32)); ensures (old(gamma_load32(Gamma_mem, $y_addr)) ==> ((memory_load32_le(mem, $x_addr) == 0bv32) || gamma_load32(Gamma_mem, $y_addr))); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free 
ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -92,18 +85,12 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R8, Gamma_R9, Gamma_VF, Gamma_ZF, Gamma_mem, NF, R0, R8, R9, VF, ZF, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1860bv64) == 1bv8); - free requires (memory_load8_le(mem, 1861bv64) == 0bv8); - free requires (memory_load8_le(mem, 1862bv64) == 2bv8); - free requires (memory_load8_le(mem, 1863bv64) == 0bv8); + free requires (memory_load32_le(mem, 1860bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_function_call_reader/clang_pic/basic_function_call_reader.expected b/src/test/correct/basic_function_call_reader/clang_pic/basic_function_call_reader.expected index c739e16a6..1e3a52e27 100644 --- a/src/test/correct/basic_function_call_reader/clang_pic/basic_function_call_reader.expected +++ b/src/test/correct/basic_function_call_reader/clang_pic/basic_function_call_reader.expected @@ -20,8 +20,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -51,10 +51,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -67,16 +63,13 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == 0bv32) ==> 
(memory_load32_le(mem, $x_addr) == 0bv32)); ensures (old(gamma_load32(Gamma_mem, $y_addr)) ==> ((memory_load32_le(mem, $x_addr) == 0bv32) || gamma_load32(Gamma_mem, $y_addr))); - free ensures (memory_load8_le(mem, 1980bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1981bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1982bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1983bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); + free ensures (memory_load32_le(mem, 1980bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -110,28 +103,22 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1980bv64) == 1bv8); - free requires (memory_load8_le(mem, 1981bv64) == 0bv8); - free requires (memory_load8_le(mem, 1982bv64) == 2bv8); - free requires (memory_load8_le(mem, 1983bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); + free requires (memory_load32_le(mem, 1980bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69688bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1980bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1981bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1982bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1983bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); + free ensures (memory_load32_le(mem, 1980bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basic_function_call_reader/clang_pic/basic_function_call_reader_gtirb.expected b/src/test/correct/basic_function_call_reader/clang_pic/basic_function_call_reader_gtirb.expected index 18e1b94d5..2cfb9fb78 100644 --- 
a/src/test/correct/basic_function_call_reader/clang_pic/basic_function_call_reader_gtirb.expected +++ b/src/test/correct/basic_function_call_reader/clang_pic/basic_function_call_reader_gtirb.expected @@ -20,8 +20,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -50,10 +50,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -65,16 +61,13 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == 0bv32) ==> (memory_load32_le(mem, $x_addr) == 0bv32)); ensures (old(gamma_load32(Gamma_mem, $y_addr)) ==> ((memory_load32_le(mem, $x_addr) == 0bv32) || gamma_load32(Gamma_mem, $y_addr))); - free ensures (memory_load8_le(mem, 1980bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1981bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1982bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1983bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); + free ensures (memory_load32_le(mem, 1980bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -108,28 +101,22 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1980bv64) == 1bv8); - free requires (memory_load8_le(mem, 1981bv64) == 0bv8); - free requires (memory_load8_le(mem, 1982bv64) == 2bv8); - free requires (memory_load8_le(mem, 1983bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); + free requires (memory_load32_le(mem, 1980bv64) == 
131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69688bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1980bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1981bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1982bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1983bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); + free ensures (memory_load32_le(mem, 1980bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basic_function_call_reader/gcc/basic_function_call_reader.expected b/src/test/correct/basic_function_call_reader/gcc/basic_function_call_reader.expected index 874baad70..0298d6790 100644 --- a/src/test/correct/basic_function_call_reader/gcc/basic_function_call_reader.expected +++ b/src/test/correct/basic_function_call_reader/gcc/basic_function_call_reader.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -45,10 +45,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -61,10 +57,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == 0bv32) ==> (memory_load32_le(mem, $x_addr) == 0bv32)); ensures (old(gamma_load32(Gamma_mem, $y_addr)) ==> ((memory_load32_le(mem, $x_addr) == 0bv32) || gamma_load32(Gamma_mem, $y_addr))); - free ensures 
(memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -102,20 +95,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1892bv64) == 1bv8); - free requires (memory_load8_le(mem, 1893bv64) == 0bv8); - free requires (memory_load8_le(mem, 1894bv64) == 2bv8); - free requires (memory_load8_le(mem, 1895bv64) == 0bv8); + free requires (memory_load32_le(mem, 1892bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basic_function_call_reader/gcc/basic_function_call_reader_gtirb.expected b/src/test/correct/basic_function_call_reader/gcc/basic_function_call_reader_gtirb.expected index 36c5088ff..6c7a57163 100644 --- a/src/test/correct/basic_function_call_reader/gcc/basic_function_call_reader_gtirb.expected +++ b/src/test/correct/basic_function_call_reader/gcc/basic_function_call_reader_gtirb.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -44,10 +44,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := 
value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -59,10 +55,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == 0bv32) ==> (memory_load32_le(mem, $x_addr) == 0bv32)); ensures (old(gamma_load32(Gamma_mem, $y_addr)) ==> ((memory_load32_le(mem, $x_addr) == 0bv32) || gamma_load32(Gamma_mem, $y_addr))); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -100,20 +93,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1892bv64) == 1bv8); - free requires (memory_load8_le(mem, 1893bv64) == 0bv8); - free requires (memory_load8_le(mem, 1894bv64) == 2bv8); - free requires (memory_load8_le(mem, 1895bv64) == 0bv8); + free requires (memory_load32_le(mem, 1892bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basic_function_call_reader/gcc_O2/basic_function_call_reader.expected b/src/test/correct/basic_function_call_reader/gcc_O2/basic_function_call_reader.expected index daf05e75b..24b255d49 100644 --- a/src/test/correct/basic_function_call_reader/gcc_O2/basic_function_call_reader.expected +++ b/src/test/correct/basic_function_call_reader/gcc_O2/basic_function_call_reader.expected @@ -8,8 +8,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -26,20 +26,13 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] 
++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == 0bv32) ==> (memory_load32_le(mem, $x_addr) == 0bv32)); ensures (old(gamma_load32(Gamma_mem, $y_addr)) ==> ((memory_load32_le(mem, $x_addr) == 0bv32) || gamma_load32(Gamma_mem, $y_addr))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -77,18 +70,12 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basic_function_call_reader/gcc_O2/basic_function_call_reader_gtirb.expected b/src/test/correct/basic_function_call_reader/gcc_O2/basic_function_call_reader_gtirb.expected index 404c6e24c..e70c005df 100644 --- a/src/test/correct/basic_function_call_reader/gcc_O2/basic_function_call_reader_gtirb.expected +++ b/src/test/correct/basic_function_call_reader/gcc_O2/basic_function_call_reader_gtirb.expected @@ -8,8 +8,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -25,20 +25,13 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) 
returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == 0bv32) ==> (memory_load32_le(mem, $x_addr) == 0bv32)); ensures (old(gamma_load32(Gamma_mem, $y_addr)) ==> ((memory_load32_le(mem, $x_addr) == 0bv32) || gamma_load32(Gamma_mem, $y_addr))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -76,18 +69,12 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basic_function_call_reader/gcc_pic/basic_function_call_reader.expected b/src/test/correct/basic_function_call_reader/gcc_pic/basic_function_call_reader.expected index 4dee04924..8b7fd2872 100644 --- a/src/test/correct/basic_function_call_reader/gcc_pic/basic_function_call_reader.expected +++ b/src/test/correct/basic_function_call_reader/gcc_pic/basic_function_call_reader.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} 
bvadd32(bv32, bv32) returns (bv32); @@ -49,10 +49,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -65,16 +61,13 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == 0bv32) ==> (memory_load32_le(mem, $x_addr) == 0bv32)); ensures (old(gamma_load32(Gamma_mem, $y_addr)) ==> ((memory_load32_le(mem, $x_addr) == 0bv32) || gamma_load32(Gamma_mem, $y_addr))); - free ensures (memory_load8_le(mem, 1956bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1957bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1958bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1959bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1956bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -108,28 +101,22 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1956bv64) == 1bv8); - free requires (memory_load8_le(mem, 1957bv64) == 0bv8); - free requires (memory_load8_le(mem, 1958bv64) == 2bv8); - free requires (memory_load8_le(mem, 1959bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1956bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1956bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1957bv64) == 0bv8); - free 
ensures (memory_load8_le(mem, 1958bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1959bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1956bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/basic_function_call_reader/gcc_pic/basic_function_call_reader_gtirb.expected b/src/test/correct/basic_function_call_reader/gcc_pic/basic_function_call_reader_gtirb.expected index 080e84267..fece62d2b 100644 --- a/src/test/correct/basic_function_call_reader/gcc_pic/basic_function_call_reader_gtirb.expected +++ b/src/test/correct/basic_function_call_reader/gcc_pic/basic_function_call_reader_gtirb.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $y_addr) then (memory_load32_le(memory, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $y_addr) then (memory_load32_le(mem$in, $x_addr) == 1bv32) else (if (index == $x_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -48,10 +48,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -63,16 +59,13 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == 0bv32) ==> (memory_load32_le(mem, $x_addr) == 0bv32)); ensures (old(gamma_load32(Gamma_mem, $y_addr)) ==> ((memory_load32_le(mem, $x_addr) == 0bv32) || gamma_load32(Gamma_mem, $y_addr))); - free ensures (memory_load8_le(mem, 1956bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1957bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1958bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1959bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1956bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 
69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -106,28 +99,22 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1956bv64) == 1bv8); - free requires (memory_load8_le(mem, 1957bv64) == 0bv8); - free requires (memory_load8_le(mem, 1958bv64) == 2bv8); - free requires (memory_load8_le(mem, 1959bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1956bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1956bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1957bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1958bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1959bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1956bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/basic_lock_read/clang/basic_lock_read.expected b/src/test/correct/basic_lock_read/clang/basic_lock_read.expected index 4b0e5107e..318bf0e75 100644 --- a/src/test/correct/basic_lock_read/clang/basic_lock_read.expected +++ b/src/test/correct/basic_lock_read/clang/basic_lock_read.expected @@ -20,7 +20,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $z_addr) then true else false)) } @@ -47,10 +47,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ 
(memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -62,10 +58,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1900bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1901bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1902bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1903bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1900bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -100,10 +93,7 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1900bv64) == 1bv8); - free requires (memory_load8_le(mem, 1901bv64) == 0bv8); - free requires (memory_load8_le(mem, 1902bv64) == 2bv8); - free requires (memory_load8_le(mem, 1903bv64) == 0bv8); + free requires (memory_load32_le(mem, 1900bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -111,10 +101,7 @@ procedure main(); ensures (R0[32:0] == 0bv32); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1900bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1901bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1902bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1903bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1900bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_lock_read/clang/basic_lock_read_gtirb.expected b/src/test/correct/basic_lock_read/clang/basic_lock_read_gtirb.expected index 613afcdf3..e4f7d61f9 100644 --- a/src/test/correct/basic_lock_read/clang/basic_lock_read_gtirb.expected +++ b/src/test/correct/basic_lock_read/clang/basic_lock_read_gtirb.expected @@ -20,7 +20,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $z_addr) then true else false)) } @@ -46,10 +46,6 @@ function {:extern} memory_load64_le(memory: 
[bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -60,10 +56,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1900bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1901bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1902bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1903bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1900bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -98,10 +91,7 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1900bv64) == 1bv8); - free requires (memory_load8_le(mem, 1901bv64) == 0bv8); - free requires (memory_load8_le(mem, 1902bv64) == 2bv8); - free requires (memory_load8_le(mem, 1903bv64) == 0bv8); + free requires (memory_load32_le(mem, 1900bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -109,10 +99,7 @@ procedure main(); ensures (R0[32:0] == 0bv32); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1900bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1901bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1902bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1903bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1900bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_lock_read/clang_O2/basic_lock_read.expected b/src/test/correct/basic_lock_read/clang_O2/basic_lock_read.expected index 95305362e..2391ae1ea 100644 --- a/src/test/correct/basic_lock_read/clang_O2/basic_lock_read.expected +++ b/src/test/correct/basic_lock_read/clang_O2/basic_lock_read.expected @@ -8,7 +8,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true 
else (if (index == $z_addr) then true else false)) } @@ -30,10 +30,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,10 +39,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -81,19 +74,13 @@ procedure main(); modifies Gamma_R0, Gamma_R8, Gamma_mem, R0, R8, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1868bv64) == 1bv8); - free requires (memory_load8_le(mem, 1869bv64) == 0bv8); - free requires (memory_load8_le(mem, 1870bv64) == 2bv8); - free requires (memory_load8_le(mem, 1871bv64) == 0bv8); + free requires (memory_load32_le(mem, 1868bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (R0[32:0] == 0bv32); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_lock_read/clang_O2/basic_lock_read_gtirb.expected b/src/test/correct/basic_lock_read/clang_O2/basic_lock_read_gtirb.expected index 7d1102980..117b59484 100644 --- a/src/test/correct/basic_lock_read/clang_O2/basic_lock_read_gtirb.expected +++ b/src/test/correct/basic_lock_read/clang_O2/basic_lock_read_gtirb.expected @@ -8,7 +8,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true 
else (if (index == $z_addr) then true else false)) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -42,10 +38,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -80,19 +73,13 @@ procedure main(); modifies Gamma_R0, Gamma_R8, Gamma_mem, R0, R8, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1868bv64) == 1bv8); - free requires (memory_load8_le(mem, 1869bv64) == 0bv8); - free requires (memory_load8_le(mem, 1870bv64) == 2bv8); - free requires (memory_load8_le(mem, 1871bv64) == 0bv8); + free requires (memory_load32_le(mem, 1868bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (R0[32:0] == 0bv32); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_lock_read/clang_pic/basic_lock_read.expected b/src/test/correct/basic_lock_read/clang_pic/basic_lock_read.expected index 6a6ad0a7b..a46a23494 100644 --- a/src/test/correct/basic_lock_read/clang_pic/basic_lock_read.expected +++ b/src/test/correct/basic_lock_read/clang_pic/basic_lock_read.expected @@ -20,7 +20,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == 
$z_addr) then true else false)) } @@ -51,10 +51,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -66,16 +62,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1972bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1973bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1974bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1975bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1972bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -106,29 +99,23 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1972bv64) == 1bv8); - free requires (memory_load8_le(mem, 1973bv64) == 0bv8); - free requires (memory_load8_le(mem, 1974bv64) == 2bv8); - free requires (memory_load8_le(mem, 1975bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69688bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1972bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (R0[32:0] == 0bv32); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1972bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1973bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1974bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1975bv64) 
== 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1972bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basic_lock_read/clang_pic/basic_lock_read_gtirb.expected b/src/test/correct/basic_lock_read/clang_pic/basic_lock_read_gtirb.expected index de5c71aa8..141222e9a 100644 --- a/src/test/correct/basic_lock_read/clang_pic/basic_lock_read_gtirb.expected +++ b/src/test/correct/basic_lock_read/clang_pic/basic_lock_read_gtirb.expected @@ -20,7 +20,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $z_addr) then true else false)) } @@ -50,10 +50,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -64,16 +60,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1972bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1973bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1974bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1975bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1972bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -104,29 +97,23 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, 
NF, R0, R31, R8, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1972bv64) == 1bv8); - free requires (memory_load8_le(mem, 1973bv64) == 0bv8); - free requires (memory_load8_le(mem, 1974bv64) == 2bv8); - free requires (memory_load8_le(mem, 1975bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69688bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1972bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (R0[32:0] == 0bv32); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1972bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1973bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1974bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1975bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1972bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basic_lock_read/gcc/basic_lock_read.expected b/src/test/correct/basic_lock_read/gcc/basic_lock_read.expected index a99b5a5e3..c4a5478c8 100644 --- a/src/test/correct/basic_lock_read/gcc/basic_lock_read.expected +++ b/src/test/correct/basic_lock_read/gcc/basic_lock_read.expected @@ -18,7 +18,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $z_addr) then true else false)) } @@ -45,10 +45,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -60,10 +56,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] 
== old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1900bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1901bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1902bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1903bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1900bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -98,10 +91,7 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1900bv64) == 1bv8); - free requires (memory_load8_le(mem, 1901bv64) == 0bv8); - free requires (memory_load8_le(mem, 1902bv64) == 2bv8); - free requires (memory_load8_le(mem, 1903bv64) == 0bv8); + free requires (memory_load32_le(mem, 1900bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -109,10 +99,7 @@ procedure main(); ensures (R0[32:0] == 0bv32); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1900bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1901bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1902bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1903bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1900bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basic_lock_read/gcc/basic_lock_read_gtirb.expected b/src/test/correct/basic_lock_read/gcc/basic_lock_read_gtirb.expected index e7c5751a8..79302e24b 100644 --- a/src/test/correct/basic_lock_read/gcc/basic_lock_read_gtirb.expected +++ b/src/test/correct/basic_lock_read/gcc/basic_lock_read_gtirb.expected @@ -18,7 +18,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $z_addr) then true else false)) } @@ -44,10 +44,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -58,10 +54,7 @@ procedure 
{:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1900bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1901bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1902bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1903bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1900bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -96,10 +89,7 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1900bv64) == 1bv8); - free requires (memory_load8_le(mem, 1901bv64) == 0bv8); - free requires (memory_load8_le(mem, 1902bv64) == 2bv8); - free requires (memory_load8_le(mem, 1903bv64) == 0bv8); + free requires (memory_load32_le(mem, 1900bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -107,10 +97,7 @@ procedure main(); ensures (R0[32:0] == 0bv32); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1900bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1901bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1902bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1903bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1900bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basic_lock_read/gcc_O2/basic_lock_read.expected b/src/test/correct/basic_lock_read/gcc_O2/basic_lock_read.expected index d2727ac66..4a49039ba 100644 --- a/src/test/correct/basic_lock_read/gcc_O2/basic_lock_read.expected +++ b/src/test/correct/basic_lock_read/gcc_O2/basic_lock_read.expected @@ -8,7 +8,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69656bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $z_addr) then true else false)) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 
2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -44,10 +40,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -82,19 +75,13 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (R0[32:0] == 0bv32); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basic_lock_read/gcc_O2/basic_lock_read_gtirb.expected b/src/test/correct/basic_lock_read/gcc_O2/basic_lock_read_gtirb.expected index 748845185..0376b9c06 100644 --- a/src/test/correct/basic_lock_read/gcc_O2/basic_lock_read_gtirb.expected +++ b/src/test/correct/basic_lock_read/gcc_O2/basic_lock_read_gtirb.expected @@ -8,7 +8,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69656bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $z_addr) then true else false)) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := 
value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -42,10 +38,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -80,19 +73,13 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (R0[32:0] == 0bv32); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basic_lock_read/gcc_pic/basic_lock_read.expected b/src/test/correct/basic_lock_read/gcc_pic/basic_lock_read.expected index c8e365ec9..857950775 100644 --- a/src/test/correct/basic_lock_read/gcc_pic/basic_lock_read.expected +++ b/src/test/correct/basic_lock_read/gcc_pic/basic_lock_read.expected @@ -18,7 +18,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $z_addr) then true else false)) } @@ -49,10 +49,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := 
value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -64,16 +60,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -104,29 +97,23 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1964bv64) == 1bv8); - free requires (memory_load8_le(mem, 1965bv64) == 0bv8); - free requires (memory_load8_le(mem, 1966bv64) == 2bv8); - free requires (memory_load8_le(mem, 1967bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load32_le(mem, 1964bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (R0[32:0] == 0bv32); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); 
+ free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/basic_lock_read/gcc_pic/basic_lock_read_gtirb.expected b/src/test/correct/basic_lock_read/gcc_pic/basic_lock_read_gtirb.expected index 033884fa7..7234dcc85 100644 --- a/src/test/correct/basic_lock_read/gcc_pic/basic_lock_read_gtirb.expected +++ b/src/test/correct/basic_lock_read/gcc_pic/basic_lock_read_gtirb.expected @@ -18,7 +18,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $z_addr) then true else false)) } @@ -48,10 +48,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -62,16 +58,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -102,29 +95,23 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1964bv64) == 1bv8); - free requires (memory_load8_le(mem, 1965bv64) == 0bv8); - free requires (memory_load8_le(mem, 1966bv64) == 2bv8); - free requires (memory_load8_le(mem, 1967bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free 
requires (memory_load64_le(mem, 69616bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load32_le(mem, 1964bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (R0[32:0] == 0bv32); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/basic_lock_security_read/clang/basic_lock_security_read.expected b/src/test/correct/basic_lock_security_read/clang/basic_lock_security_read.expected index 06964678d..0bc9f6c39 100644 --- a/src/test/correct/basic_lock_security_read/clang/basic_lock_security_read.expected +++ b/src/test/correct/basic_lock_security_read/clang/basic_lock_security_read.expected @@ -20,8 +20,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -47,10 +47,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -62,10 +58,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == 
old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -100,20 +93,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_lock_security_read/clang/basic_lock_security_read_gtirb.expected b/src/test/correct/basic_lock_security_read/clang/basic_lock_security_read_gtirb.expected index c05516f99..689765c63 100644 --- a/src/test/correct/basic_lock_security_read/clang/basic_lock_security_read_gtirb.expected +++ b/src/test/correct/basic_lock_security_read/clang/basic_lock_security_read_gtirb.expected @@ -20,8 +20,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -46,10 +46,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, 
index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -60,10 +56,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -98,20 +91,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_lock_security_read/clang_O2/basic_lock_security_read.expected b/src/test/correct/basic_lock_security_read/clang_O2/basic_lock_security_read.expected index ced4f97ac..1732e2d7d 100644 --- a/src/test/correct/basic_lock_security_read/clang_O2/basic_lock_security_read.expected +++ b/src/test/correct/basic_lock_security_read/clang_O2/basic_lock_security_read.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) 
returns (bv32); @@ -41,10 +41,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} {:bvbuiltin "sign_extend 1"} sign_extend1_32(bv32) returns (bv33); function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (bv33); function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); @@ -52,10 +48,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -90,18 +83,12 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R8, Gamma_R9, Gamma_VF, Gamma_ZF, Gamma_mem, NF, R0, R8, R9, VF, ZF, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1860bv64) == 1bv8); - free requires (memory_load8_le(mem, 1861bv64) == 0bv8); - free requires (memory_load8_le(mem, 1862bv64) == 2bv8); - free requires (memory_load8_le(mem, 1863bv64) == 0bv8); + free requires (memory_load32_le(mem, 1860bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_lock_security_read/clang_O2/basic_lock_security_read_gtirb.expected b/src/test/correct/basic_lock_security_read/clang_O2/basic_lock_security_read_gtirb.expected index 98833ec9e..5f851e084 100644 --- a/src/test/correct/basic_lock_security_read/clang_O2/basic_lock_security_read_gtirb.expected +++ b/src/test/correct/basic_lock_security_read/clang_O2/basic_lock_security_read_gtirb.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == 
$x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -40,20 +40,13 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (bv33); function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -88,18 +81,12 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R8, Gamma_R9, Gamma_VF, Gamma_ZF, Gamma_mem, NF, R0, R8, R9, VF, ZF, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1860bv64) == 1bv8); - free requires (memory_load8_le(mem, 1861bv64) == 0bv8); - free requires (memory_load8_le(mem, 1862bv64) == 2bv8); - free requires (memory_load8_le(mem, 1863bv64) == 0bv8); + free requires (memory_load32_le(mem, 1860bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_lock_security_read/clang_pic/basic_lock_security_read.expected b/src/test/correct/basic_lock_security_read/clang_pic/basic_lock_security_read.expected index 52fc57311..72842e0fe 100644 --- a/src/test/correct/basic_lock_security_read/clang_pic/basic_lock_security_read.expected +++ 
b/src/test/correct/basic_lock_security_read/clang_pic/basic_lock_security_read.expected @@ -20,8 +20,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -51,10 +51,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -66,16 +62,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1968bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1969bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1970bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1971bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1968bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -106,28 +99,22 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1968bv64) == 1bv8); - free requires (memory_load8_le(mem, 1969bv64) == 0bv8); - free requires (memory_load8_le(mem, 1970bv64) == 2bv8); - free requires (memory_load8_le(mem, 1971bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69688bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1968bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 
69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1968bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1969bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1970bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1971bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1968bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basic_lock_security_read/clang_pic/basic_lock_security_read_gtirb.expected b/src/test/correct/basic_lock_security_read/clang_pic/basic_lock_security_read_gtirb.expected index 989c887b8..ec5b9443d 100644 --- a/src/test/correct/basic_lock_security_read/clang_pic/basic_lock_security_read_gtirb.expected +++ b/src/test/correct/basic_lock_security_read/clang_pic/basic_lock_security_read_gtirb.expected @@ -20,8 +20,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -50,10 +50,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -64,16 +60,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1968bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1969bv64) == 0bv8); 
- free ensures (memory_load8_le(mem, 1970bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1971bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1968bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -104,28 +97,22 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1968bv64) == 1bv8); - free requires (memory_load8_le(mem, 1969bv64) == 0bv8); - free requires (memory_load8_le(mem, 1970bv64) == 2bv8); - free requires (memory_load8_le(mem, 1971bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69688bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1968bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1968bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1969bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1970bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1971bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1968bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basic_lock_security_read/gcc/basic_lock_security_read.expected b/src/test/correct/basic_lock_security_read/gcc/basic_lock_security_read.expected index 78970f6a4..2c29e93fa 100644 --- a/src/test/correct/basic_lock_security_read/gcc/basic_lock_security_read.expected +++ b/src/test/correct/basic_lock_security_read/gcc/basic_lock_security_read.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == 
$x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -45,10 +45,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -60,10 +56,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1888bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1889bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1890bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1891bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1888bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -98,20 +91,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1888bv64) == 1bv8); - free requires (memory_load8_le(mem, 1889bv64) == 0bv8); - free requires (memory_load8_le(mem, 1890bv64) == 2bv8); - free requires (memory_load8_le(mem, 1891bv64) == 0bv8); + free requires (memory_load32_le(mem, 1888bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1888bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1889bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1890bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1891bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1888bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basic_lock_security_read/gcc/basic_lock_security_read_gtirb.expected b/src/test/correct/basic_lock_security_read/gcc/basic_lock_security_read_gtirb.expected index 
a85f2dc3f..0fba39a39 100644 --- a/src/test/correct/basic_lock_security_read/gcc/basic_lock_security_read_gtirb.expected +++ b/src/test/correct/basic_lock_security_read/gcc/basic_lock_security_read_gtirb.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -44,10 +44,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -58,10 +54,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1888bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1889bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1890bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1891bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1888bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -96,20 +89,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1888bv64) == 1bv8); - free requires (memory_load8_le(mem, 1889bv64) == 0bv8); - free requires (memory_load8_le(mem, 1890bv64) == 2bv8); - free requires (memory_load8_le(mem, 1891bv64) == 0bv8); + free requires (memory_load32_le(mem, 1888bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1888bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1889bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1890bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1891bv64) == 0bv8); + free ensures 
(memory_load32_le(mem, 1888bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basic_lock_security_read/gcc_O2/basic_lock_security_read.expected b/src/test/correct/basic_lock_security_read/gcc_O2/basic_lock_security_read.expected index 8c6b8310d..c05476c3a 100644 --- a/src/test/correct/basic_lock_security_read/gcc_O2/basic_lock_security_read.expected +++ b/src/test/correct/basic_lock_security_read/gcc_O2/basic_lock_security_read.expected @@ -10,8 +10,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69656bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -29,19 +29,12 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -76,18 +69,12 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R2, Gamma_mem, R0, R1, R2, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 
1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basic_lock_security_read/gcc_O2/basic_lock_security_read_gtirb.expected b/src/test/correct/basic_lock_security_read/gcc_O2/basic_lock_security_read_gtirb.expected index 82e9c9896..33d8384af 100644 --- a/src/test/correct/basic_lock_security_read/gcc_O2/basic_lock_security_read_gtirb.expected +++ b/src/test/correct/basic_lock_security_read/gcc_O2/basic_lock_security_read_gtirb.expected @@ -10,8 +10,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69656bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -27,19 +27,12 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -74,18 +67,12 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R2, Gamma_mem, R0, R1, R2, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 
1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basic_lock_security_read/gcc_pic/basic_lock_security_read.expected b/src/test/correct/basic_lock_security_read/gcc_pic/basic_lock_security_read.expected index e509b20d8..f3c6100fa 100644 --- a/src/test/correct/basic_lock_security_read/gcc_pic/basic_lock_security_read.expected +++ b/src/test/correct/basic_lock_security_read/gcc_pic/basic_lock_security_read.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -49,10 +49,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -64,16 +60,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1952bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1953bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1954bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1955bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 1952bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -104,28 +97,22 @@ 
procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1952bv64) == 1bv8); - free requires (memory_load8_le(mem, 1953bv64) == 0bv8); - free requires (memory_load8_le(mem, 1954bv64) == 2bv8); - free requires (memory_load8_le(mem, 1955bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load32_le(mem, 1952bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1952bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1953bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1954bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1955bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 1952bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/basic_lock_security_read/gcc_pic/basic_lock_security_read_gtirb.expected b/src/test/correct/basic_lock_security_read/gcc_pic/basic_lock_security_read_gtirb.expected index edf6b0af5..ad9c37b62 100644 --- a/src/test/correct/basic_lock_security_read/gcc_pic/basic_lock_security_read_gtirb.expected +++ b/src/test/correct/basic_lock_security_read/gcc_pic/basic_lock_security_read_gtirb.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -48,10 +48,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ 
(memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -62,16 +58,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) && (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))))); - free ensures (memory_load8_le(mem, 1952bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1953bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1954bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1955bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 1952bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -102,28 +95,22 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1952bv64) == 1bv8); - free requires (memory_load8_le(mem, 1953bv64) == 0bv8); - free requires (memory_load8_le(mem, 1954bv64) == 2bv8); - free requires (memory_load8_le(mem, 1955bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load32_le(mem, 1952bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1952bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1953bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1954bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1955bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures 
(memory_load32_le(mem, 1952bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/basic_lock_security_write/clang/basic_lock_security_write.expected b/src/test/correct/basic_lock_security_write/clang/basic_lock_security_write.expected index 18d97f346..c44be0179 100644 --- a/src/test/correct/basic_lock_security_write/clang/basic_lock_security_write.expected +++ b/src/test/correct/basic_lock_security_write/clang/basic_lock_security_write.expected @@ -16,8 +16,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -37,10 +37,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -50,10 +46,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))) && (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr)))); - free ensures (memory_load8_le(mem, 1884bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1885bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1886bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1887bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1884bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -90,20 +83,14 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1884bv64) == 1bv8); - free requires (memory_load8_le(mem, 1885bv64) == 0bv8); - free requires (memory_load8_le(mem, 1886bv64) == 2bv8); - free requires (memory_load8_le(mem, 1887bv64) == 0bv8); + free requires (memory_load32_le(mem, 1884bv64) == 131073bv32); free requires 
(memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1884bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1885bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1886bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1887bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1884bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_lock_security_write/clang/basic_lock_security_write_gtirb.expected b/src/test/correct/basic_lock_security_write/clang/basic_lock_security_write_gtirb.expected index 02034dd80..3e3676047 100644 --- a/src/test/correct/basic_lock_security_write/clang/basic_lock_security_write_gtirb.expected +++ b/src/test/correct/basic_lock_security_write/clang/basic_lock_security_write_gtirb.expected @@ -16,8 +16,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -37,10 +37,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -50,10 +46,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))) && (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr)))); - free ensures (memory_load8_le(mem, 1884bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1885bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1886bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1887bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1884bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -90,20 +83,14 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires 
(memory_load8_le(mem, 1884bv64) == 1bv8); - free requires (memory_load8_le(mem, 1885bv64) == 0bv8); - free requires (memory_load8_le(mem, 1886bv64) == 2bv8); - free requires (memory_load8_le(mem, 1887bv64) == 0bv8); + free requires (memory_load32_le(mem, 1884bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1884bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1885bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1886bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1887bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1884bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_lock_security_write/clang_O2/basic_lock_security_write.expected b/src/test/correct/basic_lock_security_write/clang_O2/basic_lock_security_write.expected index ee50b1b52..9696e1c13 100644 --- a/src/test/correct/basic_lock_security_write/clang_O2/basic_lock_security_write.expected +++ b/src/test/correct/basic_lock_security_write/clang_O2/basic_lock_security_write.expected @@ -10,8 +10,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,10 +39,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))) && (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr)))); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 
1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -83,18 +76,12 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_lock_security_write/clang_O2/basic_lock_security_write_gtirb.expected b/src/test/correct/basic_lock_security_write/clang_O2/basic_lock_security_write_gtirb.expected index a0d345a2e..95b42776d 100644 --- a/src/test/correct/basic_lock_security_write/clang_O2/basic_lock_security_write_gtirb.expected +++ b/src/test/correct/basic_lock_security_write/clang_O2/basic_lock_security_write_gtirb.expected @@ -10,8 +10,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,10 +39,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))) && (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr)))); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free 
ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -83,18 +76,12 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_lock_security_write/clang_pic/basic_lock_security_write.expected b/src/test/correct/basic_lock_security_write/clang_pic/basic_lock_security_write.expected index a161c995d..7760307dc 100644 --- a/src/test/correct/basic_lock_security_write/clang_pic/basic_lock_security_write.expected +++ b/src/test/correct/basic_lock_security_write/clang_pic/basic_lock_security_write.expected @@ -16,8 +16,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -41,10 +41,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -54,16 +50,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))) && (memory_load32_le(mem, $x_addr) == 
old(memory_load32_le(mem, $x_addr)))); - free ensures (memory_load8_le(mem, 1956bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1957bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1958bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1959bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1956bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -96,28 +89,22 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1956bv64) == 1bv8); - free requires (memory_load8_le(mem, 1957bv64) == 0bv8); - free requires (memory_load8_le(mem, 1958bv64) == 2bv8); - free requires (memory_load8_le(mem, 1959bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69688bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1956bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1956bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1957bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1958bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1959bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1956bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basic_lock_security_write/clang_pic/basic_lock_security_write_gtirb.expected b/src/test/correct/basic_lock_security_write/clang_pic/basic_lock_security_write_gtirb.expected index 151193748..be7dfc3fa 100644 --- a/src/test/correct/basic_lock_security_write/clang_pic/basic_lock_security_write_gtirb.expected +++ b/src/test/correct/basic_lock_security_write/clang_pic/basic_lock_security_write_gtirb.expected @@ -16,8 +16,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 
69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -41,10 +41,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -54,16 +50,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))) && (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr)))); - free ensures (memory_load8_le(mem, 1956bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1957bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1958bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1959bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1956bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -96,28 +89,22 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1956bv64) == 1bv8); - free requires (memory_load8_le(mem, 1957bv64) == 0bv8); - free requires (memory_load8_le(mem, 1958bv64) == 2bv8); - free requires (memory_load8_le(mem, 1959bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69688bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1956bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == 
old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1956bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1957bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1958bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1959bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1956bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basic_lock_security_write/gcc/basic_lock_security_write.expected b/src/test/correct/basic_lock_security_write/gcc/basic_lock_security_write.expected index 58316b080..2429e5c62 100644 --- a/src/test/correct/basic_lock_security_write/gcc/basic_lock_security_write.expected +++ b/src/test/correct/basic_lock_security_write/gcc/basic_lock_security_write.expected @@ -12,8 +12,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -33,10 +33,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -46,10 +42,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))) && (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr)))); - free ensures (memory_load8_le(mem, 1908bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1909bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1910bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1911bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1908bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -86,20 +79,14 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 
69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1908bv64) == 1bv8); - free requires (memory_load8_le(mem, 1909bv64) == 0bv8); - free requires (memory_load8_le(mem, 1910bv64) == 2bv8); - free requires (memory_load8_le(mem, 1911bv64) == 0bv8); + free requires (memory_load32_le(mem, 1908bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1908bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1909bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1910bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1911bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1908bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -120,8 +107,8 @@ implementation main() R1, Gamma_R1 := 1bv64, true; call rely(); assert (L(mem, R0) ==> Gamma_R1); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R0, R1[32:0]), gamma_store32(Gamma_mem, R0, Gamma_R1); assert ((R0 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); @@ -132,8 +119,8 @@ implementation main() R1, Gamma_R1 := zero_extend32_32(memory_load32_le(stack, bvadd64(R31, 12bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 12bv64)); call rely(); assert (L(mem, R0) ==> Gamma_R1); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R0, R1[32:0]), gamma_store32(Gamma_mem, R0, Gamma_R1); assert ((R0 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); @@ -143,8 +130,8 @@ implementation main() R0, Gamma_R0 := bvadd64(R0, 20bv64), Gamma_R0; call rely(); assert (L(mem, R0) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R0, 0bv32), gamma_store32(Gamma_mem, R0, true); assert ((R0 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); @@ -154,8 +141,8 @@ implementation main() R0, Gamma_R0 := bvadd64(R0, 24bv64), Gamma_R0; call rely(); assert (L(mem, R0) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R0, 0bv32), gamma_store32(Gamma_mem, R0, true); assert ((R0 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); diff --git a/src/test/correct/basic_lock_security_write/gcc/basic_lock_security_write_gtirb.expected b/src/test/correct/basic_lock_security_write/gcc/basic_lock_security_write_gtirb.expected index b09f64f99..376db824c 100644 --- a/src/test/correct/basic_lock_security_write/gcc/basic_lock_security_write_gtirb.expected +++ 
b/src/test/correct/basic_lock_security_write/gcc/basic_lock_security_write_gtirb.expected @@ -12,8 +12,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -33,10 +33,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -46,10 +42,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))) && (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr)))); - free ensures (memory_load8_le(mem, 1908bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1909bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1910bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1911bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1908bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -86,20 +79,14 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1908bv64) == 1bv8); - free requires (memory_load8_le(mem, 1909bv64) == 0bv8); - free requires (memory_load8_le(mem, 1910bv64) == 2bv8); - free requires (memory_load8_le(mem, 1911bv64) == 0bv8); + free requires (memory_load32_le(mem, 1908bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1908bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1909bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1910bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1911bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1908bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -120,8 +107,8 @@ implementation main() R1, 
Gamma_R1 := 1bv64, true; call rely(); assert (L(mem, R0) ==> Gamma_R1); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R0, R1[32:0]), gamma_store32(Gamma_mem, R0, Gamma_R1); assert ((R0 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); @@ -132,8 +119,8 @@ implementation main() R1, Gamma_R1 := zero_extend32_32(memory_load32_le(stack, bvadd64(R31, 12bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 12bv64)); call rely(); assert (L(mem, R0) ==> Gamma_R1); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R0, R1[32:0]), gamma_store32(Gamma_mem, R0, Gamma_R1); assert ((R0 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); @@ -143,8 +130,8 @@ implementation main() R0, Gamma_R0 := bvadd64(R0, 20bv64), Gamma_R0; call rely(); assert (L(mem, R0) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R0, 0bv32), gamma_store32(Gamma_mem, R0, true); assert ((R0 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); @@ -154,8 +141,8 @@ implementation main() R0, Gamma_R0 := bvadd64(R0, 24bv64), Gamma_R0; call rely(); assert (L(mem, R0) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R0, 0bv32), gamma_store32(Gamma_mem, R0, true); assert ((R0 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); diff --git a/src/test/correct/basic_lock_security_write/gcc_O2/basic_lock_security_write.expected b/src/test/correct/basic_lock_security_write/gcc_O2/basic_lock_security_write.expected index 142ca8ac7..4c3d6f081 100644 --- a/src/test/correct/basic_lock_security_write/gcc_O2/basic_lock_security_write.expected +++ b/src/test/correct/basic_lock_security_write/gcc_O2/basic_lock_security_write.expected @@ -10,8 +10,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { 
memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,10 +39,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))) && (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr)))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -83,18 +76,12 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -112,8 +99,8 @@ implementation main() R0, Gamma_R0 := 0bv64, true; call rely(); assert (L(mem, bvadd64(R1, 20bv64)) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, bvadd64(R1, 20bv64), 0bv32), gamma_store32(Gamma_mem, bvadd64(R1, 20bv64), true); assert ((bvadd64(R1, 20bv64) == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); @@ -121,8 +108,8 @@ implementation main() assume {:captureState "%000001bd"} true; call rely(); assert (L(mem, bvadd64(R2, 4bv64)) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, bvadd64(R2, 4bv64), 0bv32), gamma_store32(Gamma_mem, bvadd64(R2, 4bv64), true); assert ((bvadd64(R2, 4bv64) == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); diff --git a/src/test/correct/basic_lock_security_write/gcc_O2/basic_lock_security_write_gtirb.expected b/src/test/correct/basic_lock_security_write/gcc_O2/basic_lock_security_write_gtirb.expected index 346670a0f..d70a6810b 100644 --- 
a/src/test/correct/basic_lock_security_write/gcc_O2/basic_lock_security_write_gtirb.expected +++ b/src/test/correct/basic_lock_security_write/gcc_O2/basic_lock_security_write_gtirb.expected @@ -10,8 +10,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,10 +39,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))) && (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr)))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -83,18 +76,12 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -112,8 +99,8 @@ 
implementation main() R0, Gamma_R0 := 0bv64, true; call rely(); assert (L(mem, bvadd64(R1, 20bv64)) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, bvadd64(R1, 20bv64), 0bv32), gamma_store32(Gamma_mem, bvadd64(R1, 20bv64), true); assert ((bvadd64(R1, 20bv64) == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); @@ -121,8 +108,8 @@ implementation main() assume {:captureState "1548$0"} true; call rely(); assert (L(mem, bvadd64(R2, 4bv64)) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, bvadd64(R2, 4bv64), 0bv32), gamma_store32(Gamma_mem, bvadd64(R2, 4bv64), true); assert ((bvadd64(R2, 4bv64) == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); diff --git a/src/test/correct/basic_lock_security_write/gcc_pic/basic_lock_security_write.expected b/src/test/correct/basic_lock_security_write/gcc_pic/basic_lock_security_write.expected index e9c34ead8..91e7850ba 100644 --- a/src/test/correct/basic_lock_security_write/gcc_pic/basic_lock_security_write.expected +++ b/src/test/correct/basic_lock_security_write/gcc_pic/basic_lock_security_write.expected @@ -12,8 +12,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -37,10 +37,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -50,16 +46,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))) && (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr)))); - free ensures (memory_load8_le(mem, 1972bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1973bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1974bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1975bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures 
(memory_load32_le(mem, 1972bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -92,28 +85,22 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1972bv64) == 1bv8); - free requires (memory_load8_le(mem, 1973bv64) == 0bv8); - free requires (memory_load8_le(mem, 1974bv64) == 2bv8); - free requires (memory_load8_le(mem, 1975bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load32_le(mem, 1972bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1972bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1973bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1974bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1975bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 1972bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { @@ -131,8 +118,8 @@ implementation main() R1, Gamma_R1 := 1bv64, true; call rely(); assert (L(mem, R0) ==> Gamma_R1); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R0, R1[32:0]), gamma_store32(Gamma_mem, R0, Gamma_R1); assert ((R0 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); @@ -144,8 +131,8 @@ implementation main() R1, Gamma_R1 := zero_extend32_32(memory_load32_le(stack, bvadd64(R31, 12bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 12bv64)); call rely(); assert (L(mem, R0) ==> Gamma_R1); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); Gamma_x_old := 
(gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R0, R1[32:0]), gamma_store32(Gamma_mem, R0, Gamma_R1); assert ((R0 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); @@ -156,8 +143,8 @@ implementation main() R0, Gamma_R0 := memory_load64_le(mem, bvadd64(R0, 4064bv64)), (gamma_load64(Gamma_mem, bvadd64(R0, 4064bv64)) || L(mem, bvadd64(R0, 4064bv64))); call rely(); assert (L(mem, R0) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R0, 0bv32), gamma_store32(Gamma_mem, R0, true); assert ((R0 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); @@ -168,8 +155,8 @@ implementation main() R0, Gamma_R0 := memory_load64_le(mem, bvadd64(R0, 4056bv64)), (gamma_load64(Gamma_mem, bvadd64(R0, 4056bv64)) || L(mem, bvadd64(R0, 4056bv64))); call rely(); assert (L(mem, R0) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R0, 0bv32), gamma_store32(Gamma_mem, R0, true); assert ((R0 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); diff --git a/src/test/correct/basic_lock_security_write/gcc_pic/basic_lock_security_write_gtirb.expected b/src/test/correct/basic_lock_security_write/gcc_pic/basic_lock_security_write_gtirb.expected index b955f39f5..28daee648 100644 --- a/src/test/correct/basic_lock_security_write/gcc_pic/basic_lock_security_write_gtirb.expected +++ b/src/test/correct/basic_lock_security_write/gcc_pic/basic_lock_security_write_gtirb.expected @@ -12,8 +12,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else (if (index == $z_addr) then true else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -37,10 +37,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -50,16 +46,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))) && (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr)))); - free ensures (memory_load8_le(mem, 1972bv64) == 1bv8); - free ensures (memory_load8_le(mem, 
1973bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1974bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1975bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 1972bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -92,28 +85,22 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1972bv64) == 1bv8); - free requires (memory_load8_le(mem, 1973bv64) == 0bv8); - free requires (memory_load8_le(mem, 1974bv64) == 2bv8); - free requires (memory_load8_le(mem, 1975bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load32_le(mem, 1972bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1972bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1973bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1974bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1975bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 1972bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { @@ -131,8 +118,8 @@ implementation main() R1, Gamma_R1 := 1bv64, true; call rely(); assert (L(mem, R0) ==> Gamma_R1); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R0, R1[32:0]), gamma_store32(Gamma_mem, R0, Gamma_R1); assert ((R0 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); @@ -144,8 +131,8 @@ implementation 
main() R1, Gamma_R1 := zero_extend32_32(memory_load32_le(stack, bvadd64(R31, 12bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 12bv64)); call rely(); assert (L(mem, R0) ==> Gamma_R1); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R0, R1[32:0]), gamma_store32(Gamma_mem, R0, Gamma_R1); assert ((R0 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); @@ -156,8 +143,8 @@ implementation main() R0, Gamma_R0 := memory_load64_le(mem, bvadd64(R0, 4064bv64)), (gamma_load64(Gamma_mem, bvadd64(R0, 4064bv64)) || L(mem, bvadd64(R0, 4064bv64))); call rely(); assert (L(mem, R0) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R0, 0bv32), gamma_store32(Gamma_mem, R0, true); assert ((R0 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); @@ -168,8 +155,8 @@ implementation main() R0, Gamma_R0 := memory_load64_le(mem, bvadd64(R0, 4056bv64)), (gamma_load64(Gamma_mem, bvadd64(R0, 4056bv64)) || L(mem, bvadd64(R0, 4056bv64))); call rely(); assert (L(mem, R0) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R0, 0bv32), gamma_store32(Gamma_mem, R0, true); assert ((R0 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); diff --git a/src/test/correct/basic_lock_unlock/clang/basic_lock_unlock.expected b/src/test/correct/basic_lock_unlock/clang/basic_lock_unlock.expected index de81ded1c..29577149e 100644 --- a/src/test/correct/basic_lock_unlock/clang/basic_lock_unlock.expected +++ b/src/test/correct/basic_lock_unlock/clang/basic_lock_unlock.expected @@ -10,7 +10,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $z_addr) then true else false)) } @@ -27,10 +27,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -39,10 +35,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures 
(memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -78,18 +71,12 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 1bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1860bv64) == 1bv8); - free requires (memory_load8_le(mem, 1861bv64) == 0bv8); - free requires (memory_load8_le(mem, 1862bv64) == 2bv8); - free requires (memory_load8_le(mem, 1863bv64) == 0bv8); + free requires (memory_load32_le(mem, 1860bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -105,8 +92,8 @@ implementation main() R8, Gamma_R8 := 1bv64, true; call rely(); assert (L(mem, bvadd64(R9, 52bv64)) ==> Gamma_R8); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); mem, Gamma_mem := memory_store32_le(mem, bvadd64(R9, 52bv64), R8[32:0]), gamma_store32(Gamma_mem, bvadd64(R9, 52bv64), Gamma_R8); assert ((z_old == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == x_old) && (memory_load32_le(mem, $z_addr) == z_old))); assume {:captureState "%000002d6"} true; @@ -114,8 +101,8 @@ implementation main() R0, Gamma_R0 := 0bv64, true; call rely(); assert (L(mem, bvadd64(R8, 56bv64)) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); mem, Gamma_mem := memory_store32_le(mem, bvadd64(R8, 56bv64), 0bv32), gamma_store32(Gamma_mem, bvadd64(R8, 56bv64), true); assert ((z_old == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == x_old) && (memory_load32_le(mem, $z_addr) == z_old))); assume {:captureState "%000002e7"} true; diff --git a/src/test/correct/basic_lock_unlock/clang/basic_lock_unlock_gtirb.expected b/src/test/correct/basic_lock_unlock/clang/basic_lock_unlock_gtirb.expected index 06d4ae629..bf9abb037 100644 --- a/src/test/correct/basic_lock_unlock/clang/basic_lock_unlock_gtirb.expected +++ b/src/test/correct/basic_lock_unlock/clang/basic_lock_unlock_gtirb.expected @@ -10,7 +10,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $z_addr) then true else false)) } @@ -27,10 +27,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ 
(memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -39,10 +35,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -78,18 +71,12 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 1bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1860bv64) == 1bv8); - free requires (memory_load8_le(mem, 1861bv64) == 0bv8); - free requires (memory_load8_le(mem, 1862bv64) == 2bv8); - free requires (memory_load8_le(mem, 1863bv64) == 0bv8); + free requires (memory_load32_le(mem, 1860bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -105,8 +92,8 @@ implementation main() R8, Gamma_R8 := 1bv64, true; call rely(); assert (L(mem, bvadd64(R9, 52bv64)) ==> Gamma_R8); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); mem, Gamma_mem := memory_store32_le(mem, bvadd64(R9, 52bv64), R8[32:0]), gamma_store32(Gamma_mem, bvadd64(R9, 52bv64), Gamma_R8); assert ((z_old == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == x_old) && (memory_load32_le(mem, $z_addr) == z_old))); assume {:captureState "1820$0"} true; @@ -114,8 +101,8 @@ implementation main() R0, Gamma_R0 := 0bv64, true; call rely(); assert (L(mem, bvadd64(R8, 56bv64)) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); mem, Gamma_mem := memory_store32_le(mem, bvadd64(R8, 56bv64), 0bv32), gamma_store32(Gamma_mem, bvadd64(R8, 56bv64), true); assert ((z_old == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == x_old) && (memory_load32_le(mem, $z_addr) == z_old))); assume {:captureState "1832$0"} 
true; diff --git a/src/test/correct/basic_lock_unlock/clang_pic/basic_lock_unlock.expected b/src/test/correct/basic_lock_unlock/clang_pic/basic_lock_unlock.expected index 652cd6a09..2cee414a8 100644 --- a/src/test/correct/basic_lock_unlock/clang_pic/basic_lock_unlock.expected +++ b/src/test/correct/basic_lock_unlock/clang_pic/basic_lock_unlock.expected @@ -10,7 +10,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $z_addr) then true else false)) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,16 +39,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -84,26 +77,20 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 1bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1932bv64) == 1bv8); - free requires (memory_load8_le(mem, 1933bv64) == 0bv8); - free requires (memory_load8_le(mem, 1934bv64) == 2bv8); - free requires (memory_load8_le(mem, 1935bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1932bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69688bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) 
== 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { @@ -117,8 +104,8 @@ implementation main() R8, Gamma_R8 := 1bv64, true; call rely(); assert (L(mem, R9) ==> Gamma_R8); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); mem, Gamma_mem := memory_store32_le(mem, R9, R8[32:0]), gamma_store32(Gamma_mem, R9, Gamma_R8); assert ((z_old == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == x_old) && (memory_load32_le(mem, $z_addr) == z_old))); assume {:captureState "%000002e5"} true; @@ -128,8 +115,8 @@ implementation main() R0, Gamma_R0 := 0bv64, true; call rely(); assert (L(mem, R8) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); mem, Gamma_mem := memory_store32_le(mem, R8, 0bv32), gamma_store32(Gamma_mem, R8, true); assert ((z_old == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == x_old) && (memory_load32_le(mem, $z_addr) == z_old))); assume {:captureState "%000002fd"} true; diff --git a/src/test/correct/basic_lock_unlock/clang_pic/basic_lock_unlock_gtirb.expected b/src/test/correct/basic_lock_unlock/clang_pic/basic_lock_unlock_gtirb.expected index 923456543..d4173aa27 100644 --- a/src/test/correct/basic_lock_unlock/clang_pic/basic_lock_unlock_gtirb.expected +++ b/src/test/correct/basic_lock_unlock/clang_pic/basic_lock_unlock_gtirb.expected @@ -10,7 +10,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $z_addr) then true else false)) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,16 +39,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; 
ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -84,26 +77,20 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 1bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1932bv64) == 1bv8); - free requires (memory_load8_le(mem, 1933bv64) == 0bv8); - free requires (memory_load8_le(mem, 1934bv64) == 2bv8); - free requires (memory_load8_le(mem, 1935bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1932bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69688bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { @@ -117,8 +104,8 @@ implementation main() R8, Gamma_R8 := 1bv64, true; call rely(); assert (L(mem, R9) ==> Gamma_R8); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); mem, Gamma_mem := memory_store32_le(mem, R9, R8[32:0]), gamma_store32(Gamma_mem, R9, Gamma_R8); assert ((z_old == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == x_old) && (memory_load32_le(mem, $z_addr) == z_old))); assume {:captureState 
"1888$0"} true; @@ -128,8 +115,8 @@ implementation main() R0, Gamma_R0 := 0bv64, true; call rely(); assert (L(mem, R8) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); mem, Gamma_mem := memory_store32_le(mem, R8, 0bv32), gamma_store32(Gamma_mem, R8, true); assert ((z_old == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == x_old) && (memory_load32_le(mem, $z_addr) == z_old))); assume {:captureState "1904$0"} true; diff --git a/src/test/correct/basic_lock_unlock/gcc/basic_lock_unlock.expected b/src/test/correct/basic_lock_unlock/gcc/basic_lock_unlock.expected index a65434ffb..3b4d7df9b 100644 --- a/src/test/correct/basic_lock_unlock/gcc/basic_lock_unlock.expected +++ b/src/test/correct/basic_lock_unlock/gcc/basic_lock_unlock.expected @@ -8,7 +8,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $z_addr) then true else false)) } @@ -25,10 +25,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -37,10 +33,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -76,18 +69,12 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 1bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1868bv64) == 1bv8); - free requires (memory_load8_le(mem, 1869bv64) == 0bv8); - free requires (memory_load8_le(mem, 1870bv64) == 2bv8); - free requires (memory_load8_le(mem, 1871bv64) == 0bv8); + free requires (memory_load32_le(mem, 1868bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) 
== 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -104,8 +91,8 @@ implementation main() R1, Gamma_R1 := 1bv64, true; call rely(); assert (L(mem, R0) ==> Gamma_R1); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); mem, Gamma_mem := memory_store32_le(mem, R0, R1[32:0]), gamma_store32(Gamma_mem, R0, Gamma_R1); assert ((z_old == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == x_old) && (memory_load32_le(mem, $z_addr) == z_old))); assume {:captureState "%000002e4"} true; @@ -113,8 +100,8 @@ implementation main() R0, Gamma_R0 := bvadd64(R0, 24bv64), Gamma_R0; call rely(); assert (L(mem, R0) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); mem, Gamma_mem := memory_store32_le(mem, R0, 0bv32), gamma_store32(Gamma_mem, R0, true); assert ((z_old == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == x_old) && (memory_load32_le(mem, $z_addr) == z_old))); assume {:captureState "%000002f6"} true; diff --git a/src/test/correct/basic_lock_unlock/gcc/basic_lock_unlock_gtirb.expected b/src/test/correct/basic_lock_unlock/gcc/basic_lock_unlock_gtirb.expected index dd838e366..0e359f034 100644 --- a/src/test/correct/basic_lock_unlock/gcc/basic_lock_unlock_gtirb.expected +++ b/src/test/correct/basic_lock_unlock/gcc/basic_lock_unlock_gtirb.expected @@ -8,7 +8,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $z_addr) then true else false)) } @@ -25,10 +25,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -37,10 +33,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -76,18 +69,12 @@ procedure main(); requires (memory_load32_le(mem, 
$z_addr) == 1bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1868bv64) == 1bv8); - free requires (memory_load8_le(mem, 1869bv64) == 0bv8); - free requires (memory_load8_le(mem, 1870bv64) == 2bv8); - free requires (memory_load8_le(mem, 1871bv64) == 0bv8); + free requires (memory_load32_le(mem, 1868bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -104,8 +91,8 @@ implementation main() R1, Gamma_R1 := 1bv64, true; call rely(); assert (L(mem, R0) ==> Gamma_R1); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); mem, Gamma_mem := memory_store32_le(mem, R0, R1[32:0]), gamma_store32(Gamma_mem, R0, Gamma_R1); assert ((z_old == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == x_old) && (memory_load32_le(mem, $z_addr) == z_old))); assume {:captureState "1824$0"} true; @@ -113,8 +100,8 @@ implementation main() R0, Gamma_R0 := bvadd64(R0, 24bv64), Gamma_R0; call rely(); assert (L(mem, R0) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); mem, Gamma_mem := memory_store32_le(mem, R0, 0bv32), gamma_store32(Gamma_mem, R0, true); assert ((z_old == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == x_old) && (memory_load32_le(mem, $z_addr) == z_old))); assume {:captureState "1836$0"} true; diff --git a/src/test/correct/basic_lock_unlock/gcc_O2/basic_lock_unlock.expected b/src/test/correct/basic_lock_unlock/gcc_O2/basic_lock_unlock.expected index e05f293a4..eec3701a1 100644 --- a/src/test/correct/basic_lock_unlock/gcc_O2/basic_lock_unlock.expected +++ b/src/test/correct/basic_lock_unlock/gcc_O2/basic_lock_unlock.expected @@ -12,7 +12,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $z_addr) then true else false)) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := 
value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -41,10 +37,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -80,18 +73,12 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 1bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -109,15 +96,15 @@ implementation main() R0, Gamma_R0 := 0bv64, true; call rely(); assert (L(mem, bvadd64(R1, 20bv64)) ==> Gamma_R3); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); mem, Gamma_mem := memory_store32_le(mem, bvadd64(R1, 20bv64), R3[32:0]), gamma_store32(Gamma_mem, bvadd64(R1, 20bv64), Gamma_R3); assert ((z_old == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == x_old) && (memory_load32_le(mem, $z_addr) == z_old))); assume {:captureState "%000001c3"} true; call rely(); assert (L(mem, bvadd64(R2, 4bv64)) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); mem, Gamma_mem := memory_store32_le(mem, bvadd64(R2, 4bv64), 0bv32), gamma_store32(Gamma_mem, bvadd64(R2, 4bv64), true); assert ((z_old == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == x_old) && (memory_load32_le(mem, $z_addr) == z_old))); assume {:captureState "%000001ca"} true; diff --git a/src/test/correct/basic_lock_unlock/gcc_O2/basic_lock_unlock_gtirb.expected b/src/test/correct/basic_lock_unlock/gcc_O2/basic_lock_unlock_gtirb.expected index 59bd7e039..00dbd52b5 100644 --- a/src/test/correct/basic_lock_unlock/gcc_O2/basic_lock_unlock_gtirb.expected +++ b/src/test/correct/basic_lock_unlock/gcc_O2/basic_lock_unlock_gtirb.expected @@ -12,7 +12,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function 
{:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $z_addr) then true else false)) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -41,10 +37,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -80,18 +73,12 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 1bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -109,15 +96,15 @@ implementation main() R0, Gamma_R0 := 0bv64, true; call rely(); assert (L(mem, bvadd64(R1, 20bv64)) ==> Gamma_R3); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); mem, Gamma_mem := memory_store32_le(mem, bvadd64(R1, 20bv64), R3[32:0]), gamma_store32(Gamma_mem, bvadd64(R1, 20bv64), Gamma_R3); assert ((z_old == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == x_old) && (memory_load32_le(mem, $z_addr) == z_old))); assume {:captureState "1552$0"} true; call rely(); assert (L(mem, bvadd64(R2, 4bv64)) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := 
memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); mem, Gamma_mem := memory_store32_le(mem, bvadd64(R2, 4bv64), 0bv32), gamma_store32(Gamma_mem, bvadd64(R2, 4bv64), true); assert ((z_old == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == x_old) && (memory_load32_le(mem, $z_addr) == z_old))); assume {:captureState "1556$0"} true; diff --git a/src/test/correct/basic_lock_unlock/gcc_pic/basic_lock_unlock.expected b/src/test/correct/basic_lock_unlock/gcc_pic/basic_lock_unlock.expected index 25a9043d3..da9677480 100644 --- a/src/test/correct/basic_lock_unlock/gcc_pic/basic_lock_unlock.expected +++ b/src/test/correct/basic_lock_unlock/gcc_pic/basic_lock_unlock.expected @@ -8,7 +8,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $z_addr) then true else false)) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -41,16 +37,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -82,26 +75,20 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 1bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1932bv64) == 1bv8); - free requires (memory_load8_le(mem, 1933bv64) == 0bv8); - free requires (memory_load8_le(mem, 1934bv64) == 2bv8); - free requires (memory_load8_le(mem, 1935bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires 
(memory_load64_le(mem, 69616bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load32_le(mem, 1932bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { @@ -115,8 +102,8 @@ implementation main() R1, Gamma_R1 := 1bv64, true; call rely(); assert (L(mem, R0) ==> Gamma_R1); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); mem, Gamma_mem := memory_store32_le(mem, R0, R1[32:0]), gamma_store32(Gamma_mem, R0, Gamma_R1); assert ((z_old == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == x_old) && (memory_load32_le(mem, $z_addr) == z_old))); assume {:captureState "%000002e5"} true; @@ -125,8 +112,8 @@ implementation main() R0, Gamma_R0 := memory_load64_le(mem, bvadd64(R0, 4056bv64)), (gamma_load64(Gamma_mem, bvadd64(R0, 4056bv64)) || L(mem, bvadd64(R0, 4056bv64))); call rely(); assert (L(mem, R0) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); mem, Gamma_mem := memory_store32_le(mem, R0, 0bv32), gamma_store32(Gamma_mem, R0, true); assert ((z_old == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == x_old) && (memory_load32_le(mem, $z_addr) == z_old))); assume {:captureState "%000002f8"} true; diff --git a/src/test/correct/basic_lock_unlock/gcc_pic/basic_lock_unlock_gtirb.expected b/src/test/correct/basic_lock_unlock/gcc_pic/basic_lock_unlock_gtirb.expected index 94a34679d..65c38fdf8 100644 --- a/src/test/correct/basic_lock_unlock/gcc_pic/basic_lock_unlock_gtirb.expected +++ b/src/test/correct/basic_lock_unlock/gcc_pic/basic_lock_unlock_gtirb.expected @@ -8,7 +8,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $z_addr) then true else false)) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ 
(memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -41,16 +37,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -82,26 +75,20 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 1bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1932bv64) == 1bv8); - free requires (memory_load8_le(mem, 1933bv64) == 0bv8); - free requires (memory_load8_le(mem, 1934bv64) == 2bv8); - free requires (memory_load8_le(mem, 1935bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load32_le(mem, 1932bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 
69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { @@ -115,8 +102,8 @@ implementation main() R1, Gamma_R1 := 1bv64, true; call rely(); assert (L(mem, R0) ==> Gamma_R1); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); mem, Gamma_mem := memory_store32_le(mem, R0, R1[32:0]), gamma_store32(Gamma_mem, R0, Gamma_R1); assert ((z_old == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == x_old) && (memory_load32_le(mem, $z_addr) == z_old))); assume {:captureState "1888$0"} true; @@ -125,8 +112,8 @@ implementation main() R0, Gamma_R0 := memory_load64_le(mem, bvadd64(R0, 4056bv64)), (gamma_load64(Gamma_mem, bvadd64(R0, 4056bv64)) || L(mem, bvadd64(R0, 4056bv64))); call rely(); assert (L(mem, R0) ==> true); - z_old := memory_load32_le(mem, $z_addr); x_old := memory_load32_le(mem, $x_addr); + z_old := memory_load32_le(mem, $z_addr); mem, Gamma_mem := memory_store32_le(mem, R0, 0bv32), gamma_store32(Gamma_mem, R0, true); assert ((z_old == 0bv32) ==> ((memory_load32_le(mem, $x_addr) == x_old) && (memory_load32_le(mem, $z_addr) == z_old))); assume {:captureState "1900$0"} true; diff --git a/src/test/correct/basic_loop_assign/clang/basic_loop_assign.expected b/src/test/correct/basic_loop_assign/clang/basic_loop_assign.expected index 619574ba1..ee929dc6b 100644 --- a/src/test/correct/basic_loop_assign/clang/basic_loop_assign.expected +++ b/src/test/correct/basic_loop_assign/clang/basic_loop_assign.expected @@ -8,7 +8,7 @@ var {:extern} R9: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -27,10 +27,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -39,10 +35,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (bvsle32(memory_load32_le(mem, $x_addr), 10bv32) && bvslt32(old(memory_load32_le(mem, $x_addr)), 10bv32))) || ((memory_load32_le(mem, $x_addr) == 21bv32) && (old(memory_load32_le(mem, $x_addr)) == 20bv32))); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures 
(memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -78,19 +71,13 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1852bv64) == 1bv8); - free requires (memory_load8_le(mem, 1853bv64) == 0bv8); - free requires (memory_load8_le(mem, 1854bv64) == 2bv8); - free requires (memory_load8_le(mem, 1855bv64) == 0bv8); + free requires (memory_load32_le(mem, 1852bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures ((memory_load32_le(mem, $x_addr) == 20bv32) || (memory_load32_le(mem, $x_addr) == 21bv32)); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_loop_assign/clang/basic_loop_assign_gtirb.expected b/src/test/correct/basic_loop_assign/clang/basic_loop_assign_gtirb.expected index 09495b51b..1360f86d7 100644 --- a/src/test/correct/basic_loop_assign/clang/basic_loop_assign_gtirb.expected +++ b/src/test/correct/basic_loop_assign/clang/basic_loop_assign_gtirb.expected @@ -8,7 +8,7 @@ var {:extern} R9: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -27,10 +27,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -39,10 +35,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (bvsle32(memory_load32_le(mem, $x_addr), 10bv32) && bvslt32(old(memory_load32_le(mem, $x_addr)), 10bv32))) || ((memory_load32_le(mem, $x_addr) == 21bv32) && (old(memory_load32_le(mem, $x_addr)) == 20bv32))); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures 
(memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -78,19 +71,13 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1852bv64) == 1bv8); - free requires (memory_load8_le(mem, 1853bv64) == 0bv8); - free requires (memory_load8_le(mem, 1854bv64) == 2bv8); - free requires (memory_load8_le(mem, 1855bv64) == 0bv8); + free requires (memory_load32_le(mem, 1852bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures ((memory_load32_le(mem, $x_addr) == 20bv32) || (memory_load32_le(mem, $x_addr) == 21bv32)); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_loop_assign/clang_pic/basic_loop_assign.expected b/src/test/correct/basic_loop_assign/clang_pic/basic_loop_assign.expected index 774f8bda5..83c27b684 100644 --- a/src/test/correct/basic_loop_assign/clang_pic/basic_loop_assign.expected +++ b/src/test/correct/basic_loop_assign/clang_pic/basic_loop_assign.expected @@ -8,7 +8,7 @@ var {:extern} R9: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,15 +39,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (bvsle32(memory_load32_le(mem, $x_addr), 10bv32) && bvslt32(old(memory_load32_le(mem, $x_addr)), 10bv32))) || ((memory_load32_le(mem, $x_addr) == 21bv32) && (old(memory_load32_le(mem, $x_addr)) == 20bv32))); - free ensures 
(memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -83,25 +76,19 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); free requires (memory_load64_le(mem, 69056bv64) == 1872bv64); free requires (memory_load64_le(mem, 69064bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures ((memory_load32_le(mem, $x_addr) == 20bv32) || (memory_load32_le(mem, $x_addr) == 21bv32)); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basic_loop_assign/clang_pic/basic_loop_assign_gtirb.expected b/src/test/correct/basic_loop_assign/clang_pic/basic_loop_assign_gtirb.expected index bb96a23c0..f0747770f 100644 --- a/src/test/correct/basic_loop_assign/clang_pic/basic_loop_assign_gtirb.expected +++ b/src/test/correct/basic_loop_assign/clang_pic/basic_loop_assign_gtirb.expected @@ -8,7 +8,7 @@ var {:extern} R9: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ 
(memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,15 +39,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (bvsle32(memory_load32_le(mem, $x_addr), 10bv32) && bvslt32(old(memory_load32_le(mem, $x_addr)), 10bv32))) || ((memory_load32_le(mem, $x_addr) == 21bv32) && (old(memory_load32_le(mem, $x_addr)) == 20bv32))); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -83,25 +76,19 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); free requires (memory_load64_le(mem, 69056bv64) == 1872bv64); free requires (memory_load64_le(mem, 69064bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures ((memory_load32_le(mem, $x_addr) == 20bv32) || (memory_load32_le(mem, $x_addr) == 21bv32)); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git 
a/src/test/correct/basic_loop_assign/gcc/basic_loop_assign.expected b/src/test/correct/basic_loop_assign/gcc/basic_loop_assign.expected index 03d3a3556..e0062a835 100644 --- a/src/test/correct/basic_loop_assign/gcc/basic_loop_assign.expected +++ b/src/test/correct/basic_loop_assign/gcc/basic_loop_assign.expected @@ -6,7 +6,7 @@ var {:extern} R1: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -25,10 +25,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -37,10 +33,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (bvsle32(memory_load32_le(mem, $x_addr), 10bv32) && bvslt32(old(memory_load32_le(mem, $x_addr)), 10bv32))) || ((memory_load32_le(mem, $x_addr) == 21bv32) && (old(memory_load32_le(mem, $x_addr)) == 20bv32))); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -76,19 +69,13 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures ((memory_load32_le(mem, $x_addr) == 20bv32) || (memory_load32_le(mem, $x_addr) == 21bv32)); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 
1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basic_loop_assign/gcc/basic_loop_assign_gtirb.expected b/src/test/correct/basic_loop_assign/gcc/basic_loop_assign_gtirb.expected index f01e436af..0af511b16 100644 --- a/src/test/correct/basic_loop_assign/gcc/basic_loop_assign_gtirb.expected +++ b/src/test/correct/basic_loop_assign/gcc/basic_loop_assign_gtirb.expected @@ -6,7 +6,7 @@ var {:extern} R1: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -25,10 +25,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -37,10 +33,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (bvsle32(memory_load32_le(mem, $x_addr), 10bv32) && bvslt32(old(memory_load32_le(mem, $x_addr)), 10bv32))) || ((memory_load32_le(mem, $x_addr) == 21bv32) && (old(memory_load32_le(mem, $x_addr)) == 20bv32))); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -76,19 +69,13 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures ((memory_load32_le(mem, $x_addr) == 20bv32) || (memory_load32_le(mem, $x_addr) == 21bv32)); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free 
ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basic_loop_assign/gcc_O2/basic_loop_assign.expected b/src/test/correct/basic_loop_assign/gcc_O2/basic_loop_assign.expected index 3b2cc89e6..ba3a372d4 100644 --- a/src/test/correct/basic_loop_assign/gcc_O2/basic_loop_assign.expected +++ b/src/test/correct/basic_loop_assign/gcc_O2/basic_loop_assign.expected @@ -8,7 +8,7 @@ var {:extern} R2: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -27,10 +27,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -39,10 +35,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (bvsle32(memory_load32_le(mem, $x_addr), 10bv32) && bvslt32(old(memory_load32_le(mem, $x_addr)), 10bv32))) || ((memory_load32_le(mem, $x_addr) == 21bv32) && (old(memory_load32_le(mem, $x_addr)) == 20bv32))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -78,19 +71,13 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures ((memory_load32_le(mem, $x_addr) == 20bv32) || (memory_load32_le(mem, $x_addr) == 21bv32)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 
1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basic_loop_assign/gcc_O2/basic_loop_assign_gtirb.expected b/src/test/correct/basic_loop_assign/gcc_O2/basic_loop_assign_gtirb.expected index 2b0945da1..a6dc84191 100644 --- a/src/test/correct/basic_loop_assign/gcc_O2/basic_loop_assign_gtirb.expected +++ b/src/test/correct/basic_loop_assign/gcc_O2/basic_loop_assign_gtirb.expected @@ -8,7 +8,7 @@ var {:extern} R2: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -27,10 +27,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -39,10 +35,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (bvsle32(memory_load32_le(mem, $x_addr), 10bv32) && bvslt32(old(memory_load32_le(mem, $x_addr)), 10bv32))) || ((memory_load32_le(mem, $x_addr) == 21bv32) && (old(memory_load32_le(mem, $x_addr)) == 20bv32))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -78,19 +71,13 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures ((memory_load32_le(mem, $x_addr) == 20bv32) || (memory_load32_le(mem, $x_addr) == 21bv32)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 
1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basic_loop_assign/gcc_pic/basic_loop_assign.expected b/src/test/correct/basic_loop_assign/gcc_pic/basic_loop_assign.expected index 026236fd8..17b1df7e3 100644 --- a/src/test/correct/basic_loop_assign/gcc_pic/basic_loop_assign.expected +++ b/src/test/correct/basic_loop_assign/gcc_pic/basic_loop_assign.expected @@ -6,7 +6,7 @@ var {:extern} R1: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -41,15 +37,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (bvsle32(memory_load32_le(mem, $x_addr), 10bv32) && bvslt32(old(memory_load32_le(mem, $x_addr)), 10bv32))) || ((memory_load32_le(mem, $x_addr) == 21bv32) && (old(memory_load32_le(mem, $x_addr)) == 20bv32))); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -81,25 +74,19 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); - free requires 
(memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures ((memory_load32_le(mem, $x_addr) == 20bv32) || (memory_load32_le(mem, $x_addr) == 21bv32)); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/basic_loop_assign/gcc_pic/basic_loop_assign_gtirb.expected b/src/test/correct/basic_loop_assign/gcc_pic/basic_loop_assign_gtirb.expected index 89072a4ba..399b13120 100644 --- a/src/test/correct/basic_loop_assign/gcc_pic/basic_loop_assign_gtirb.expected +++ b/src/test/correct/basic_loop_assign/gcc_pic/basic_loop_assign_gtirb.expected @@ -6,7 +6,7 @@ var {:extern} R1: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -41,15 +37,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (((memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))) || (bvsle32(memory_load32_le(mem, $x_addr), 10bv32) && bvslt32(old(memory_load32_le(mem, $x_addr)), 10bv32))) || ((memory_load32_le(mem, $x_addr) == 21bv32) && (old(memory_load32_le(mem, $x_addr)) == 20bv32))); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 
1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -81,25 +74,19 @@ procedure main(); requires (memory_load32_le(mem, $x_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures ((memory_load32_le(mem, $x_addr) == 20bv32) || (memory_load32_le(mem, $x_addr) == 21bv32)); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/basic_operation_evaluation/clang/basic_operation_evaluation.expected b/src/test/correct/basic_operation_evaluation/clang/basic_operation_evaluation.expected index e2c00441a..97c370685 100644 --- a/src/test/correct/basic_operation_evaluation/clang/basic_operation_evaluation.expected +++ b/src/test/correct/basic_operation_evaluation/clang/basic_operation_evaluation.expected @@ -55,10 +55,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns 
([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -72,12 +68,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1952bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1953bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1954bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1955bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1952bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -85,8 +78,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -103,20 +96,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R10, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_VF, Gamma_ZF, Gamma_stack, NF, R0, R10, R31, R8, R9, VF, ZF, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1952bv64) == 1bv8); - free requires (memory_load8_le(mem, 1953bv64) == 0bv8); - free requires (memory_load8_le(mem, 1954bv64) == 2bv8); - free requires (memory_load8_le(mem, 1955bv64) == 0bv8); + free requires (memory_load32_le(mem, 1952bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1952bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1953bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1954bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1955bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1952bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_operation_evaluation/clang/basic_operation_evaluation_gtirb.expected b/src/test/correct/basic_operation_evaluation/clang/basic_operation_evaluation_gtirb.expected index bd0a36ae5..cf9b45fac 100644 --- a/src/test/correct/basic_operation_evaluation/clang/basic_operation_evaluation_gtirb.expected +++ b/src/test/correct/basic_operation_evaluation/clang/basic_operation_evaluation_gtirb.expected @@ -55,10 +55,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function 
{:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -72,12 +68,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1952bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1953bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1954bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1955bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1952bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -85,8 +78,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -103,20 +96,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R10, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_VF, Gamma_ZF, Gamma_stack, NF, R0, R10, R31, R8, R9, VF, ZF, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1952bv64) == 1bv8); - free requires (memory_load8_le(mem, 1953bv64) == 0bv8); - free requires (memory_load8_le(mem, 1954bv64) == 2bv8); - free requires (memory_load8_le(mem, 1955bv64) == 0bv8); + free requires (memory_load32_le(mem, 1952bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1952bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1953bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1954bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1955bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1952bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_operation_evaluation/gcc/basic_operation_evaluation.expected b/src/test/correct/basic_operation_evaluation/gcc/basic_operation_evaluation.expected index c51655fb8..44bceb181 100644 --- a/src/test/correct/basic_operation_evaluation/gcc/basic_operation_evaluation.expected +++ b/src/test/correct/basic_operation_evaluation/gcc/basic_operation_evaluation.expected @@ -40,10 +40,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) 
returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -56,12 +52,9 @@ function {:extern} {:bvbuiltin "sign_extend 1"} sign_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1948bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1949bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1950bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1951bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1948bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -69,8 +62,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -87,20 +80,14 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R2, Gamma_R31, Gamma_stack, R0, R1, R2, R31, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1948bv64) == 1bv8); - free requires (memory_load8_le(mem, 1949bv64) == 0bv8); - free requires (memory_load8_le(mem, 1950bv64) == 2bv8); - free requires (memory_load8_le(mem, 1951bv64) == 0bv8); + free requires (memory_load32_le(mem, 1948bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1948bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1949bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1950bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1951bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1948bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basic_operation_evaluation/gcc/basic_operation_evaluation_gtirb.expected b/src/test/correct/basic_operation_evaluation/gcc/basic_operation_evaluation_gtirb.expected index c85b41e05..b5ec9e0ac 100644 --- a/src/test/correct/basic_operation_evaluation/gcc/basic_operation_evaluation_gtirb.expected +++ b/src/test/correct/basic_operation_evaluation/gcc/basic_operation_evaluation_gtirb.expected @@ -39,10 +39,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns 
(bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -55,12 +51,9 @@ function {:extern} {:bvbuiltin "sign_extend 1"} sign_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1948bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1949bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1950bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1951bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1948bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -68,8 +61,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -86,20 +79,14 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R2, Gamma_R31, Gamma_stack, R0, R1, R2, R31, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1948bv64) == 1bv8); - free requires (memory_load8_le(mem, 1949bv64) == 0bv8); - free requires (memory_load8_le(mem, 1950bv64) == 2bv8); - free requires (memory_load8_le(mem, 1951bv64) == 0bv8); + free requires (memory_load32_le(mem, 1948bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1948bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1949bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1950bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1951bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1948bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basic_sec_policy_read/clang/basic_sec_policy_read.expected b/src/test/correct/basic_sec_policy_read/clang/basic_sec_policy_read.expected index 461154d55..c8bb1c0d2 100644 --- a/src/test/correct/basic_sec_policy_read/clang/basic_sec_policy_read.expected +++ b/src/test/correct/basic_sec_policy_read/clang/basic_sec_policy_read.expected @@ -20,8 +20,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, 
$z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -47,10 +47,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -62,10 +58,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) != 0bv32) ==> (memory_load32_le(mem, $z_addr) != 0bv32)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -100,20 +93,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_sec_policy_read/clang/basic_sec_policy_read_gtirb.expected b/src/test/correct/basic_sec_policy_read/clang/basic_sec_policy_read_gtirb.expected index 818a0a62d..ab49772d7 100644 --- a/src/test/correct/basic_sec_policy_read/clang/basic_sec_policy_read_gtirb.expected +++ b/src/test/correct/basic_sec_policy_read/clang/basic_sec_policy_read_gtirb.expected @@ -20,8 +20,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: 
bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -46,10 +46,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -60,10 +56,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) != 0bv32) ==> (memory_load32_le(mem, $z_addr) != 0bv32)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -98,20 +91,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_sec_policy_read/clang_O2/basic_sec_policy_read.expected b/src/test/correct/basic_sec_policy_read/clang_O2/basic_sec_policy_read.expected index d45dc06e1..570c5a400 100644 --- 
a/src/test/correct/basic_sec_policy_read/clang_O2/basic_sec_policy_read.expected +++ b/src/test/correct/basic_sec_policy_read/clang_O2/basic_sec_policy_read.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -41,10 +41,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} {:bvbuiltin "sign_extend 1"} sign_extend1_32(bv32) returns (bv33); function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (bv33); function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); @@ -52,10 +48,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) != 0bv32) ==> (memory_load32_le(mem, $z_addr) != 0bv32)); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -90,18 +83,12 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R8, Gamma_R9, Gamma_VF, Gamma_ZF, Gamma_mem, NF, R0, R8, R9, VF, ZF, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1860bv64) == 1bv8); - free requires (memory_load8_le(mem, 1861bv64) == 0bv8); - free requires (memory_load8_le(mem, 1862bv64) == 2bv8); - free requires (memory_load8_le(mem, 1863bv64) == 0bv8); + free requires (memory_load32_le(mem, 1860bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 
1812bv64); diff --git a/src/test/correct/basic_sec_policy_read/clang_O2/basic_sec_policy_read_gtirb.expected b/src/test/correct/basic_sec_policy_read/clang_O2/basic_sec_policy_read_gtirb.expected index 95b1fe31e..14bd77436 100644 --- a/src/test/correct/basic_sec_policy_read/clang_O2/basic_sec_policy_read_gtirb.expected +++ b/src/test/correct/basic_sec_policy_read/clang_O2/basic_sec_policy_read_gtirb.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -40,20 +40,13 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (bv33); function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) != 0bv32) ==> (memory_load32_le(mem, $z_addr) != 0bv32)); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -88,18 +81,12 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R8, Gamma_R9, Gamma_VF, Gamma_ZF, Gamma_mem, NF, R0, R8, R9, VF, ZF, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1860bv64) == 1bv8); - free requires (memory_load8_le(mem, 1861bv64) == 0bv8); - free requires (memory_load8_le(mem, 1862bv64) == 2bv8); - free requires (memory_load8_le(mem, 1863bv64) == 0bv8); + free requires (memory_load32_le(mem, 1860bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 
69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_sec_policy_read/clang_pic/basic_sec_policy_read.expected b/src/test/correct/basic_sec_policy_read/clang_pic/basic_sec_policy_read.expected index cf1f9fc9c..d01c59e33 100644 --- a/src/test/correct/basic_sec_policy_read/clang_pic/basic_sec_policy_read.expected +++ b/src/test/correct/basic_sec_policy_read/clang_pic/basic_sec_policy_read.expected @@ -20,8 +20,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -51,10 +51,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -66,16 +62,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) != 0bv32) ==> (memory_load32_le(mem, $z_addr) != 0bv32)); - free ensures (memory_load8_le(mem, 1968bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1969bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1970bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1971bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1968bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -106,28 +99,22 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1968bv64) == 1bv8); - free requires (memory_load8_le(mem, 1969bv64) == 0bv8); - free requires (memory_load8_le(mem, 1970bv64) == 2bv8); - free requires 
(memory_load8_le(mem, 1971bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1968bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69688bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1968bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1969bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1970bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1971bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1968bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basic_sec_policy_read/clang_pic/basic_sec_policy_read_gtirb.expected b/src/test/correct/basic_sec_policy_read/clang_pic/basic_sec_policy_read_gtirb.expected index d74b467e4..66e17bf7d 100644 --- a/src/test/correct/basic_sec_policy_read/clang_pic/basic_sec_policy_read_gtirb.expected +++ b/src/test/correct/basic_sec_policy_read/clang_pic/basic_sec_policy_read_gtirb.expected @@ -20,8 +20,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -50,10 +50,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -64,16 +60,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures 
((old(memory_load32_le(mem, $z_addr)) != 0bv32) ==> (memory_load32_le(mem, $z_addr) != 0bv32)); - free ensures (memory_load8_le(mem, 1968bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1969bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1970bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1971bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1968bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -104,28 +97,22 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1968bv64) == 1bv8); - free requires (memory_load8_le(mem, 1969bv64) == 0bv8); - free requires (memory_load8_le(mem, 1970bv64) == 2bv8); - free requires (memory_load8_le(mem, 1971bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1968bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69688bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1968bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1969bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1970bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1971bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1968bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basic_sec_policy_read/gcc/basic_sec_policy_read.expected b/src/test/correct/basic_sec_policy_read/gcc/basic_sec_policy_read.expected index d4870579e..7f1f47659 100644 --- a/src/test/correct/basic_sec_policy_read/gcc/basic_sec_policy_read.expected +++ b/src/test/correct/basic_sec_policy_read/gcc/basic_sec_policy_read.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: 
bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -45,10 +45,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -60,10 +56,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) != 0bv32) ==> (memory_load32_le(mem, $z_addr) != 0bv32)); - free ensures (memory_load8_le(mem, 1888bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1889bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1890bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1891bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1888bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -98,20 +91,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1888bv64) == 1bv8); - free requires (memory_load8_le(mem, 1889bv64) == 0bv8); - free requires (memory_load8_le(mem, 1890bv64) == 2bv8); - free requires (memory_load8_le(mem, 1891bv64) == 0bv8); + free requires (memory_load32_le(mem, 1888bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1888bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1889bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1890bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1891bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1888bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basic_sec_policy_read/gcc/basic_sec_policy_read_gtirb.expected 
b/src/test/correct/basic_sec_policy_read/gcc/basic_sec_policy_read_gtirb.expected index fa9e9fab6..eaf8bfed3 100644 --- a/src/test/correct/basic_sec_policy_read/gcc/basic_sec_policy_read_gtirb.expected +++ b/src/test/correct/basic_sec_policy_read/gcc/basic_sec_policy_read_gtirb.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -44,10 +44,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -58,10 +54,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) != 0bv32) ==> (memory_load32_le(mem, $z_addr) != 0bv32)); - free ensures (memory_load8_le(mem, 1888bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1889bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1890bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1891bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1888bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -96,20 +89,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1888bv64) == 1bv8); - free requires (memory_load8_le(mem, 1889bv64) == 0bv8); - free requires (memory_load8_le(mem, 1890bv64) == 2bv8); - free requires (memory_load8_le(mem, 1891bv64) == 0bv8); + free requires (memory_load32_le(mem, 1888bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1888bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1889bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1890bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1891bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1888bv64) 
== 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basic_sec_policy_read/gcc_O2/basic_sec_policy_read.expected b/src/test/correct/basic_sec_policy_read/gcc_O2/basic_sec_policy_read.expected index 62e0c7e6d..1d55828ab 100644 --- a/src/test/correct/basic_sec_policy_read/gcc_O2/basic_sec_policy_read.expected +++ b/src/test/correct/basic_sec_policy_read/gcc_O2/basic_sec_policy_read.expected @@ -10,8 +10,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69656bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -29,19 +29,12 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) != 0bv32) ==> (memory_load32_le(mem, $z_addr) != 0bv32)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -76,18 +69,12 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R2, Gamma_mem, R0, R1, R2, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures 
(memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basic_sec_policy_read/gcc_O2/basic_sec_policy_read_gtirb.expected b/src/test/correct/basic_sec_policy_read/gcc_O2/basic_sec_policy_read_gtirb.expected index a4f5cbd03..332049bab 100644 --- a/src/test/correct/basic_sec_policy_read/gcc_O2/basic_sec_policy_read_gtirb.expected +++ b/src/test/correct/basic_sec_policy_read/gcc_O2/basic_sec_policy_read_gtirb.expected @@ -10,8 +10,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69656bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -27,19 +27,12 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) != 0bv32) ==> (memory_load32_le(mem, $z_addr) != 0bv32)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -74,18 +67,12 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R2, Gamma_mem, R0, R1, R2, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures 
(memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basic_sec_policy_read/gcc_pic/basic_sec_policy_read.expected b/src/test/correct/basic_sec_policy_read/gcc_pic/basic_sec_policy_read.expected index 5c536f15f..cd3bca542 100644 --- a/src/test/correct/basic_sec_policy_read/gcc_pic/basic_sec_policy_read.expected +++ b/src/test/correct/basic_sec_policy_read/gcc_pic/basic_sec_policy_read.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -49,10 +49,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -64,16 +60,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) != 0bv32) ==> (memory_load32_le(mem, $z_addr) != 0bv32)); - free ensures (memory_load8_le(mem, 1952bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1953bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1954bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1955bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 1952bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -104,28 +97,22 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1952bv64) == 1bv8); - free requires (memory_load8_le(mem, 1953bv64) == 0bv8); - free 
requires (memory_load8_le(mem, 1954bv64) == 2bv8); - free requires (memory_load8_le(mem, 1955bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load32_le(mem, 1952bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1952bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1953bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1954bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1955bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 1952bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/basic_sec_policy_read/gcc_pic/basic_sec_policy_read_gtirb.expected b/src/test/correct/basic_sec_policy_read/gcc_pic/basic_sec_policy_read_gtirb.expected index 9f286dc48..196da7a8f 100644 --- a/src/test/correct/basic_sec_policy_read/gcc_pic/basic_sec_policy_read_gtirb.expected +++ b/src/test/correct/basic_sec_policy_read/gcc_pic/basic_sec_policy_read_gtirb.expected @@ -18,8 +18,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -48,10 +48,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := 
value[32:24]] } @@ -62,16 +58,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $z_addr)) != 0bv32) ==> (memory_load32_le(mem, $z_addr) != 0bv32)); - free ensures (memory_load8_le(mem, 1952bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1953bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1954bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1955bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 1952bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -102,28 +95,22 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1952bv64) == 1bv8); - free requires (memory_load8_le(mem, 1953bv64) == 0bv8); - free requires (memory_load8_le(mem, 1954bv64) == 2bv8); - free requires (memory_load8_le(mem, 1955bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load32_le(mem, 1952bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1952bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1953bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1954bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1955bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 1952bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git 
a/src/test/correct/basic_sec_policy_write/clang/basic_sec_policy_write.expected b/src/test/correct/basic_sec_policy_write/clang/basic_sec_policy_write.expected index 87caaee9d..a3979b57c 100644 --- a/src/test/correct/basic_sec_policy_write/clang/basic_sec_policy_write.expected +++ b/src/test/correct/basic_sec_policy_write/clang/basic_sec_policy_write.expected @@ -16,8 +16,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -37,10 +37,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -50,10 +46,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr)); - free ensures (memory_load8_le(mem, 1888bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1889bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1890bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1891bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1888bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -89,20 +82,14 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1888bv64) == 1bv8); - free requires (memory_load8_le(mem, 1889bv64) == 0bv8); - free requires (memory_load8_le(mem, 1890bv64) == 2bv8); - free requires (memory_load8_le(mem, 1891bv64) == 0bv8); + free requires (memory_load32_le(mem, 1888bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1888bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1889bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1890bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1891bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1888bv64) == 131073bv32); free ensures (memory_load64_le(mem, 
69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_sec_policy_write/clang/basic_sec_policy_write_gtirb.expected b/src/test/correct/basic_sec_policy_write/clang/basic_sec_policy_write_gtirb.expected index 57ae9842b..9070091b0 100644 --- a/src/test/correct/basic_sec_policy_write/clang/basic_sec_policy_write_gtirb.expected +++ b/src/test/correct/basic_sec_policy_write/clang/basic_sec_policy_write_gtirb.expected @@ -16,8 +16,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -37,10 +37,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -50,10 +46,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr)); - free ensures (memory_load8_le(mem, 1888bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1889bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1890bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1891bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1888bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -89,20 +82,14 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1888bv64) == 1bv8); - free requires (memory_load8_le(mem, 1889bv64) == 0bv8); - free requires (memory_load8_le(mem, 1890bv64) == 2bv8); - free requires (memory_load8_le(mem, 1891bv64) == 0bv8); + free requires (memory_load32_le(mem, 1888bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1888bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1889bv64) == 0bv8); - free ensures 
(memory_load8_le(mem, 1890bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1891bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1888bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_sec_policy_write/clang_O2/basic_sec_policy_write.expected b/src/test/correct/basic_sec_policy_write/clang_O2/basic_sec_policy_write.expected index 4eefb0b59..bfe1831a9 100644 --- a/src/test/correct/basic_sec_policy_write/clang_O2/basic_sec_policy_write.expected +++ b/src/test/correct/basic_sec_policy_write/clang_O2/basic_sec_policy_write.expected @@ -12,8 +12,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -33,10 +33,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -45,10 +41,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr)); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -84,18 +77,12 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1860bv64) == 1bv8); - free requires (memory_load8_le(mem, 1861bv64) == 0bv8); - free requires (memory_load8_le(mem, 1862bv64) == 2bv8); - free requires (memory_load8_le(mem, 1863bv64) == 0bv8); + free requires (memory_load32_le(mem, 1860bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures 
(memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_sec_policy_write/clang_O2/basic_sec_policy_write_gtirb.expected b/src/test/correct/basic_sec_policy_write/clang_O2/basic_sec_policy_write_gtirb.expected index 9082fce74..7e4ff7591 100644 --- a/src/test/correct/basic_sec_policy_write/clang_O2/basic_sec_policy_write_gtirb.expected +++ b/src/test/correct/basic_sec_policy_write/clang_O2/basic_sec_policy_write_gtirb.expected @@ -12,8 +12,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -33,10 +33,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -45,10 +41,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr)); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -84,18 +77,12 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1860bv64) == 1bv8); - free requires (memory_load8_le(mem, 1861bv64) == 0bv8); - free requires (memory_load8_le(mem, 1862bv64) == 2bv8); - free requires (memory_load8_le(mem, 1863bv64) == 0bv8); + free requires (memory_load32_le(mem, 1860bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires 
(memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basic_sec_policy_write/clang_pic/basic_sec_policy_write.expected b/src/test/correct/basic_sec_policy_write/clang_pic/basic_sec_policy_write.expected index 89f6dc312..f306f9dd4 100644 --- a/src/test/correct/basic_sec_policy_write/clang_pic/basic_sec_policy_write.expected +++ b/src/test/correct/basic_sec_policy_write/clang_pic/basic_sec_policy_write.expected @@ -16,8 +16,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -41,10 +41,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -54,16 +50,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr)); - free ensures (memory_load8_le(mem, 1960bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1961bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1962bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1963bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1960bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -95,28 +88,22 @@ procedure main(); requires (Gamma_R0 == false); free 
requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1960bv64) == 1bv8); - free requires (memory_load8_le(mem, 1961bv64) == 0bv8); - free requires (memory_load8_le(mem, 1962bv64) == 2bv8); - free requires (memory_load8_le(mem, 1963bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69688bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1960bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1960bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1961bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1962bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1963bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1960bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basic_sec_policy_write/clang_pic/basic_sec_policy_write_gtirb.expected b/src/test/correct/basic_sec_policy_write/clang_pic/basic_sec_policy_write_gtirb.expected index 4f2b3c6e1..7bb922ca1 100644 --- a/src/test/correct/basic_sec_policy_write/clang_pic/basic_sec_policy_write_gtirb.expected +++ b/src/test/correct/basic_sec_policy_write/clang_pic/basic_sec_policy_write_gtirb.expected @@ -16,8 +16,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -41,10 +41,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index 
:= value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -54,16 +50,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr)); - free ensures (memory_load8_le(mem, 1960bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1961bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1962bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1963bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1960bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -95,28 +88,22 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1960bv64) == 1bv8); - free requires (memory_load8_le(mem, 1961bv64) == 0bv8); - free requires (memory_load8_le(mem, 1962bv64) == 2bv8); - free requires (memory_load8_le(mem, 1963bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69688bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1960bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1960bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1961bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1962bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1963bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1960bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basic_sec_policy_write/gcc/basic_sec_policy_write.expected b/src/test/correct/basic_sec_policy_write/gcc/basic_sec_policy_write.expected index d0a626a88..e759f7728 100644 --- 
a/src/test/correct/basic_sec_policy_write/gcc/basic_sec_policy_write.expected +++ b/src/test/correct/basic_sec_policy_write/gcc/basic_sec_policy_write.expected @@ -12,8 +12,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -33,10 +33,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -46,10 +42,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr)); - free ensures (memory_load8_le(mem, 1912bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1913bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1914bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1915bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1912bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -85,20 +78,14 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1912bv64) == 1bv8); - free requires (memory_load8_le(mem, 1913bv64) == 0bv8); - free requires (memory_load8_le(mem, 1914bv64) == 2bv8); - free requires (memory_load8_le(mem, 1915bv64) == 0bv8); + free requires (memory_load32_le(mem, 1912bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1912bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1913bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1914bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1915bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1912bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git 
a/src/test/correct/basic_sec_policy_write/gcc/basic_sec_policy_write_gtirb.expected b/src/test/correct/basic_sec_policy_write/gcc/basic_sec_policy_write_gtirb.expected index c3e3810f0..7ca7ac1e3 100644 --- a/src/test/correct/basic_sec_policy_write/gcc/basic_sec_policy_write_gtirb.expected +++ b/src/test/correct/basic_sec_policy_write/gcc/basic_sec_policy_write_gtirb.expected @@ -12,8 +12,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -33,10 +33,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -46,10 +42,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr)); - free ensures (memory_load8_le(mem, 1912bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1913bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1914bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1915bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1912bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -85,20 +78,14 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1912bv64) == 1bv8); - free requires (memory_load8_le(mem, 1913bv64) == 0bv8); - free requires (memory_load8_le(mem, 1914bv64) == 2bv8); - free requires (memory_load8_le(mem, 1915bv64) == 0bv8); + free requires (memory_load32_le(mem, 1912bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1912bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1913bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1914bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1915bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1912bv64) == 131073bv32); free ensures 
(memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basic_sec_policy_write/gcc_O2/basic_sec_policy_write.expected b/src/test/correct/basic_sec_policy_write/gcc_O2/basic_sec_policy_write.expected index 3555af022..77502d30d 100644 --- a/src/test/correct/basic_sec_policy_write/gcc_O2/basic_sec_policy_write.expected +++ b/src/test/correct/basic_sec_policy_write/gcc_O2/basic_sec_policy_write.expected @@ -12,8 +12,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -33,10 +33,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -45,10 +41,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -84,18 +77,12 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) 
== 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basic_sec_policy_write/gcc_O2/basic_sec_policy_write_gtirb.expected b/src/test/correct/basic_sec_policy_write/gcc_O2/basic_sec_policy_write_gtirb.expected index e7ca31254..f1873f412 100644 --- a/src/test/correct/basic_sec_policy_write/gcc_O2/basic_sec_policy_write_gtirb.expected +++ b/src/test/correct/basic_sec_policy_write/gcc_O2/basic_sec_policy_write_gtirb.expected @@ -12,8 +12,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -33,10 +33,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -45,10 +41,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -84,18 +77,12 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 
0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basic_sec_policy_write/gcc_pic/basic_sec_policy_write.expected b/src/test/correct/basic_sec_policy_write/gcc_pic/basic_sec_policy_write.expected index bddbe8673..aa7a508bf 100644 --- a/src/test/correct/basic_sec_policy_write/gcc_pic/basic_sec_policy_write.expected +++ b/src/test/correct/basic_sec_policy_write/gcc_pic/basic_sec_policy_write.expected @@ -12,8 +12,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -37,10 +37,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -50,16 +46,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr)); - free ensures (memory_load8_le(mem, 1976bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1977bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1978bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1979bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 1976bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -91,28 +84,22 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1976bv64) 
== 1bv8); - free requires (memory_load8_le(mem, 1977bv64) == 0bv8); - free requires (memory_load8_le(mem, 1978bv64) == 2bv8); - free requires (memory_load8_le(mem, 1979bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load32_le(mem, 1976bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1976bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1977bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1978bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1979bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 1976bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/basic_sec_policy_write/gcc_pic/basic_sec_policy_write_gtirb.expected b/src/test/correct/basic_sec_policy_write/gcc_pic/basic_sec_policy_write_gtirb.expected index fec869bf6..700334cdf 100644 --- a/src/test/correct/basic_sec_policy_write/gcc_pic/basic_sec_policy_write_gtirb.expected +++ b/src/test/correct/basic_sec_policy_write/gcc_pic/basic_sec_policy_write_gtirb.expected @@ -12,8 +12,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -37,10 +37,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := 
value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -50,16 +46,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr)); - free ensures (memory_load8_le(mem, 1976bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1977bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1978bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1979bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 1976bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -91,28 +84,22 @@ procedure main(); requires (Gamma_R0 == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1976bv64) == 1bv8); - free requires (memory_load8_le(mem, 1977bv64) == 0bv8); - free requires (memory_load8_le(mem, 1978bv64) == 2bv8); - free requires (memory_load8_le(mem, 1979bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load32_le(mem, 1976bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1976bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1977bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1978bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1979bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 1976bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git 
a/src/test/correct/basicassign_gamma0/clang/basicassign_gamma0.expected b/src/test/correct/basicassign_gamma0/clang/basicassign_gamma0.expected index 90c2af906..665a042aa 100644 --- a/src/test/correct/basicassign_gamma0/clang/basicassign_gamma0.expected +++ b/src/test/correct/basicassign_gamma0/clang/basicassign_gamma0.expected @@ -10,7 +10,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69684bv64); const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -44,10 +40,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $secret_addr) == old(memory_load32_le(mem, $secret_addr))); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -78,18 +71,12 @@ procedure main(); requires (gamma_load32(Gamma_mem, $secret_addr) == true); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basicassign_gamma0/clang/basicassign_gamma0_gtirb.expected 
b/src/test/correct/basicassign_gamma0/clang/basicassign_gamma0_gtirb.expected index 84d1b7a26..08553b2a1 100644 --- a/src/test/correct/basicassign_gamma0/clang/basicassign_gamma0_gtirb.expected +++ b/src/test/correct/basicassign_gamma0/clang/basicassign_gamma0_gtirb.expected @@ -10,7 +10,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69684bv64); const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -44,10 +40,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $secret_addr) == old(memory_load32_le(mem, $secret_addr))); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -78,18 +71,12 @@ procedure main(); requires (gamma_load32(Gamma_mem, $secret_addr) == true); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/basicassign_gamma0/clang_pic/basicassign_gamma0.expected b/src/test/correct/basicassign_gamma0/clang_pic/basicassign_gamma0.expected index c6b1cbea4..ca1f84813 100644 --- 
a/src/test/correct/basicassign_gamma0/clang_pic/basicassign_gamma0.expected +++ b/src/test/correct/basicassign_gamma0/clang_pic/basicassign_gamma0.expected @@ -10,7 +10,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69684bv64); const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -35,10 +35,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -48,16 +44,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $secret_addr) == old(memory_load32_le(mem, $secret_addr))); - free ensures (memory_load8_le(mem, 1928bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1929bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1930bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1931bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); + free ensures (memory_load32_le(mem, 1928bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -84,26 +77,20 @@ procedure main(); requires (gamma_load32(Gamma_mem, $secret_addr) == true); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1928bv64) == 1bv8); - free requires (memory_load8_le(mem, 1929bv64) == 0bv8); - free requires (memory_load8_le(mem, 1930bv64) == 2bv8); - free requires (memory_load8_le(mem, 1931bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); + free requires (memory_load32_le(mem, 1928bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69688bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); - free ensures (memory_load8_le(mem, 1928bv64) == 1bv8); - free ensures 
(memory_load8_le(mem, 1929bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1930bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1931bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1928bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basicassign_gamma0/clang_pic/basicassign_gamma0_gtirb.expected b/src/test/correct/basicassign_gamma0/clang_pic/basicassign_gamma0_gtirb.expected index e8be112ec..011c7af6c 100644 --- a/src/test/correct/basicassign_gamma0/clang_pic/basicassign_gamma0_gtirb.expected +++ b/src/test/correct/basicassign_gamma0/clang_pic/basicassign_gamma0_gtirb.expected @@ -10,7 +10,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69684bv64); const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -35,10 +35,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -48,16 +44,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $secret_addr) == old(memory_load32_le(mem, $secret_addr))); - free ensures (memory_load8_le(mem, 1928bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1929bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1930bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1931bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); + free ensures (memory_load32_le(mem, 1928bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -84,26 
+77,20 @@ procedure main(); requires (gamma_load32(Gamma_mem, $secret_addr) == true); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1928bv64) == 1bv8); - free requires (memory_load8_le(mem, 1929bv64) == 0bv8); - free requires (memory_load8_le(mem, 1930bv64) == 2bv8); - free requires (memory_load8_le(mem, 1931bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); + free requires (memory_load32_le(mem, 1928bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69688bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); - free ensures (memory_load8_le(mem, 1928bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1929bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1930bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1931bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1928bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/basicassign_gamma0/gcc/basicassign_gamma0.expected b/src/test/correct/basicassign_gamma0/gcc/basicassign_gamma0.expected index da167b58a..360bde82a 100644 --- a/src/test/correct/basicassign_gamma0/gcc/basicassign_gamma0.expected +++ b/src/test/correct/basicassign_gamma0/gcc/basicassign_gamma0.expected @@ -8,7 +8,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69656bv64); const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -42,10 +38,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == 
old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $secret_addr) == old(memory_load32_le(mem, $secret_addr))); - free ensures (memory_load8_le(mem, 1864bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1865bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1866bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1867bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1864bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -76,18 +69,12 @@ procedure main(); requires (gamma_load32(Gamma_mem, $secret_addr) == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1864bv64) == 1bv8); - free requires (memory_load8_le(mem, 1865bv64) == 0bv8); - free requires (memory_load8_le(mem, 1866bv64) == 2bv8); - free requires (memory_load8_le(mem, 1867bv64) == 0bv8); + free requires (memory_load32_le(mem, 1864bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1864bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1865bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1866bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1867bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1864bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basicassign_gamma0/gcc/basicassign_gamma0_gtirb.expected b/src/test/correct/basicassign_gamma0/gcc/basicassign_gamma0_gtirb.expected index f94eea40c..8e56ac1a2 100644 --- a/src/test/correct/basicassign_gamma0/gcc/basicassign_gamma0_gtirb.expected +++ b/src/test/correct/basicassign_gamma0/gcc/basicassign_gamma0_gtirb.expected @@ -8,7 +8,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69656bv64); const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -42,10 +38,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $secret_addr) == old(memory_load32_le(mem, $secret_addr))); - free 
ensures (memory_load8_le(mem, 1864bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1865bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1866bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1867bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1864bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -76,18 +69,12 @@ procedure main(); requires (gamma_load32(Gamma_mem, $secret_addr) == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1864bv64) == 1bv8); - free requires (memory_load8_le(mem, 1865bv64) == 0bv8); - free requires (memory_load8_le(mem, 1866bv64) == 2bv8); - free requires (memory_load8_le(mem, 1867bv64) == 0bv8); + free requires (memory_load32_le(mem, 1864bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1864bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1865bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1866bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1867bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1864bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/basicassign_gamma0/gcc_O2/basicassign_gamma0.expected b/src/test/correct/basicassign_gamma0/gcc_O2/basicassign_gamma0.expected index 23e72c990..a6c4c2b09 100644 --- a/src/test/correct/basicassign_gamma0/gcc_O2/basicassign_gamma0.expected +++ b/src/test/correct/basicassign_gamma0/gcc_O2/basicassign_gamma0.expected @@ -10,7 +10,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69656bv64); const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -44,10 +40,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $secret_addr) == old(memory_load32_le(mem, $secret_addr))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures 
(memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -78,18 +71,12 @@ procedure main(); requires (gamma_load32(Gamma_mem, $secret_addr) == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basicassign_gamma0/gcc_O2/basicassign_gamma0_gtirb.expected b/src/test/correct/basicassign_gamma0/gcc_O2/basicassign_gamma0_gtirb.expected index 20b34ec43..18d09ccfd 100644 --- a/src/test/correct/basicassign_gamma0/gcc_O2/basicassign_gamma0_gtirb.expected +++ b/src/test/correct/basicassign_gamma0/gcc_O2/basicassign_gamma0_gtirb.expected @@ -10,7 +10,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69656bv64); const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -44,10 +40,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $secret_addr) == old(memory_load32_le(mem, $secret_addr))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free 
ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -78,18 +71,12 @@ procedure main(); requires (gamma_load32(Gamma_mem, $secret_addr) == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/basicassign_gamma0/gcc_pic/basicassign_gamma0.expected b/src/test/correct/basicassign_gamma0/gcc_pic/basicassign_gamma0.expected index 5465e2352..ced19431d 100644 --- a/src/test/correct/basicassign_gamma0/gcc_pic/basicassign_gamma0.expected +++ b/src/test/correct/basicassign_gamma0/gcc_pic/basicassign_gamma0.expected @@ -8,7 +8,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69656bv64); const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -33,10 +33,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -46,16 +42,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $secret_addr) == old(memory_load32_le(mem, $secret_addr))); - free ensures (memory_load8_le(mem, 1928bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1929bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1930bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1931bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free 
ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1928bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -82,26 +75,20 @@ procedure main(); requires (gamma_load32(Gamma_mem, $secret_addr) == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1928bv64) == 1bv8); - free requires (memory_load8_le(mem, 1929bv64) == 0bv8); - free requires (memory_load8_le(mem, 1930bv64) == 2bv8); - free requires (memory_load8_le(mem, 1931bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1928bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); - free ensures (memory_load8_le(mem, 1928bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1929bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1930bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1931bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); + free ensures (memory_load32_le(mem, 1928bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/basicassign_gamma0/gcc_pic/basicassign_gamma0_gtirb.expected b/src/test/correct/basicassign_gamma0/gcc_pic/basicassign_gamma0_gtirb.expected index d66c514cb..653ba335f 100644 --- a/src/test/correct/basicassign_gamma0/gcc_pic/basicassign_gamma0_gtirb.expected +++ b/src/test/correct/basicassign_gamma0/gcc_pic/basicassign_gamma0_gtirb.expected @@ -8,7 +8,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69656bv64); const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -33,10 +33,6 @@ function {:extern} memory_load64_le(memory: 
[bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -46,16 +42,13 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $secret_addr) == old(memory_load32_le(mem, $secret_addr))); - free ensures (memory_load8_le(mem, 1928bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1929bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1930bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1931bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1928bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -82,26 +75,20 @@ procedure main(); requires (gamma_load32(Gamma_mem, $secret_addr) == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1928bv64) == 1bv8); - free requires (memory_load8_le(mem, 1929bv64) == 0bv8); - free requires (memory_load8_le(mem, 1930bv64) == 2bv8); - free requires (memory_load8_le(mem, 1931bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1928bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); - free ensures (memory_load8_le(mem, 1928bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1929bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1930bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1931bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); + free ensures (memory_load32_le(mem, 1928bv64) == 131073bv32); + free ensures 
(memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/basicfree/clang/basicfree.expected b/src/test/correct/basicfree/clang/basicfree.expected index b76efd72b..1224cf5b2 100644 --- a/src/test/correct/basicfree/clang/basicfree.expected +++ b/src/test/correct/basicfree/clang/basicfree.expected @@ -20,7 +20,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2080bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -37,12 +37,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { @@ -55,12 +55,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2080bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2081bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2082bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2083bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2080bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1920bv64); free ensures (memory_load64_le(mem, 69592bv64) == 2004bv64); @@ -68,8 +65,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -84,18 +81,12 @@ procedure {:extern} guarantee_reflexive(); procedure #free(); 
modifies Gamma_R16, Gamma_R17, R16, R17; - free requires (memory_load8_le(mem, 2080bv64) == 1bv8); - free requires (memory_load8_le(mem, 2081bv64) == 0bv8); - free requires (memory_load8_le(mem, 2082bv64) == 2bv8); - free requires (memory_load8_le(mem, 2083bv64) == 0bv8); + free requires (memory_load32_le(mem, 2080bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 2000bv64); free requires (memory_load64_le(mem, 69072bv64) == 1920bv64); free requires (memory_load64_le(mem, 69592bv64) == 2004bv64); free requires (memory_load64_le(mem, 69688bv64) == 69688bv64); - free ensures (memory_load8_le(mem, 2080bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2081bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2082bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2083bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2080bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1920bv64); free ensures (memory_load64_le(mem, 69592bv64) == 2004bv64); @@ -105,10 +96,7 @@ procedure main(); modifies Gamma_R0, Gamma_R16, Gamma_R17, Gamma_R29, Gamma_R30, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_mem, Gamma_stack, R0, R16, R17, R29, R30, R31, R8, R9, mem, stack; free requires (memory_load64_le(mem, 69680bv64) == 0bv64); free requires (memory_load64_le(mem, 69688bv64) == 69688bv64); - free requires (memory_load8_le(mem, 2080bv64) == 1bv8); - free requires (memory_load8_le(mem, 2081bv64) == 0bv8); - free requires (memory_load8_le(mem, 2082bv64) == 2bv8); - free requires (memory_load8_le(mem, 2083bv64) == 0bv8); + free requires (memory_load32_le(mem, 2080bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 2000bv64); free requires (memory_load64_le(mem, 69072bv64) == 1920bv64); free requires (memory_load64_le(mem, 69592bv64) == 2004bv64); @@ -117,10 +105,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 2080bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2081bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2082bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2083bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2080bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1920bv64); free ensures (memory_load64_le(mem, 69592bv64) == 2004bv64); @@ -173,18 +158,12 @@ implementation main() procedure malloc(); modifies Gamma_R16, Gamma_R17, R16, R17; - free requires (memory_load8_le(mem, 2080bv64) == 1bv8); - free requires (memory_load8_le(mem, 2081bv64) == 0bv8); - free requires (memory_load8_le(mem, 2082bv64) == 2bv8); - free requires (memory_load8_le(mem, 2083bv64) == 0bv8); + free requires (memory_load32_le(mem, 2080bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 2000bv64); free requires (memory_load64_le(mem, 69072bv64) == 1920bv64); free requires (memory_load64_le(mem, 69592bv64) == 2004bv64); free requires (memory_load64_le(mem, 69688bv64) == 69688bv64); - free ensures (memory_load8_le(mem, 2080bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2081bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2082bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2083bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2080bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69072bv64) == 
1920bv64); free ensures (memory_load64_le(mem, 69592bv64) == 2004bv64); diff --git a/src/test/correct/basicfree/clang/basicfree_gtirb.expected b/src/test/correct/basicfree/clang/basicfree_gtirb.expected index 0afab7180..53b48009a 100644 --- a/src/test/correct/basicfree/clang/basicfree_gtirb.expected +++ b/src/test/correct/basicfree/clang/basicfree_gtirb.expected @@ -20,7 +20,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2080bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -37,12 +37,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { @@ -55,12 +55,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2080bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2081bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2082bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2083bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2080bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1920bv64); free ensures (memory_load64_le(mem, 69592bv64) == 2004bv64); @@ -68,8 +65,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -84,18 +81,12 @@ procedure {:extern} guarantee_reflexive(); procedure FUN_650(); modifies Gamma_R16, Gamma_R17, Gamma_mem, R16, R17, mem; - free requires (memory_load8_le(mem, 2080bv64) == 1bv8); - free requires (memory_load8_le(mem, 2081bv64) == 0bv8); - free requires (memory_load8_le(mem, 2082bv64) == 2bv8); - free requires (memory_load8_le(mem, 2083bv64) == 0bv8); + free requires (memory_load32_le(mem, 2080bv64) == 
131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 2000bv64); free requires (memory_load64_le(mem, 69072bv64) == 1920bv64); free requires (memory_load64_le(mem, 69592bv64) == 2004bv64); free requires (memory_load64_le(mem, 69688bv64) == 69688bv64); - free ensures (memory_load8_le(mem, 2080bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2081bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2082bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2083bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2080bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1920bv64); free ensures (memory_load64_le(mem, 69592bv64) == 2004bv64); @@ -115,18 +106,12 @@ implementation FUN_650() procedure FUN_680(); modifies Gamma_R16, Gamma_R17, Gamma_mem, R16, R17, mem; - free requires (memory_load8_le(mem, 2080bv64) == 1bv8); - free requires (memory_load8_le(mem, 2081bv64) == 0bv8); - free requires (memory_load8_le(mem, 2082bv64) == 2bv8); - free requires (memory_load8_le(mem, 2083bv64) == 0bv8); + free requires (memory_load32_le(mem, 2080bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 2000bv64); free requires (memory_load64_le(mem, 69072bv64) == 1920bv64); free requires (memory_load64_le(mem, 69592bv64) == 2004bv64); free requires (memory_load64_le(mem, 69688bv64) == 69688bv64); - free ensures (memory_load8_le(mem, 2080bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2081bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2082bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2083bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2080bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1920bv64); free ensures (memory_load64_le(mem, 69592bv64) == 2004bv64); @@ -148,10 +133,7 @@ procedure main(); modifies Gamma_R0, Gamma_R16, Gamma_R17, Gamma_R29, Gamma_R30, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_mem, Gamma_stack, R0, R16, R17, R29, R30, R31, R8, R9, mem, stack; free requires (memory_load64_le(mem, 69680bv64) == 0bv64); free requires (memory_load64_le(mem, 69688bv64) == 69688bv64); - free requires (memory_load8_le(mem, 2080bv64) == 1bv8); - free requires (memory_load8_le(mem, 2081bv64) == 0bv8); - free requires (memory_load8_le(mem, 2082bv64) == 2bv8); - free requires (memory_load8_le(mem, 2083bv64) == 0bv8); + free requires (memory_load32_le(mem, 2080bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 2000bv64); free requires (memory_load64_le(mem, 69072bv64) == 1920bv64); free requires (memory_load64_le(mem, 69592bv64) == 2004bv64); @@ -160,10 +142,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 2080bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2081bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2082bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2083bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2080bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1920bv64); free ensures (memory_load64_le(mem, 69592bv64) == 2004bv64); @@ -215,36 +194,24 @@ implementation main() } procedure malloc(); - free requires (memory_load8_le(mem, 2080bv64) == 1bv8); - free requires (memory_load8_le(mem, 2081bv64) == 0bv8); - free requires (memory_load8_le(mem, 2082bv64) == 
2bv8); - free requires (memory_load8_le(mem, 2083bv64) == 0bv8); + free requires (memory_load32_le(mem, 2080bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 2000bv64); free requires (memory_load64_le(mem, 69072bv64) == 1920bv64); free requires (memory_load64_le(mem, 69592bv64) == 2004bv64); free requires (memory_load64_le(mem, 69688bv64) == 69688bv64); - free ensures (memory_load8_le(mem, 2080bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2081bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2082bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2083bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2080bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1920bv64); free ensures (memory_load64_le(mem, 69592bv64) == 2004bv64); free ensures (memory_load64_le(mem, 69688bv64) == 69688bv64); procedure #free(); - free requires (memory_load8_le(mem, 2080bv64) == 1bv8); - free requires (memory_load8_le(mem, 2081bv64) == 0bv8); - free requires (memory_load8_le(mem, 2082bv64) == 2bv8); - free requires (memory_load8_le(mem, 2083bv64) == 0bv8); + free requires (memory_load32_le(mem, 2080bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 2000bv64); free requires (memory_load64_le(mem, 69072bv64) == 1920bv64); free requires (memory_load64_le(mem, 69592bv64) == 2004bv64); free requires (memory_load64_le(mem, 69688bv64) == 69688bv64); - free ensures (memory_load8_le(mem, 2080bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2081bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2082bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2083bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2080bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1920bv64); free ensures (memory_load64_le(mem, 69592bv64) == 2004bv64); diff --git a/src/test/correct/basicfree/gcc/basicfree.expected b/src/test/correct/basicfree/gcc/basicfree.expected index ef418f173..bf81fd5ac 100644 --- a/src/test/correct/basicfree/gcc/basicfree.expected +++ b/src/test/correct/basicfree/gcc/basicfree.expected @@ -18,7 +18,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2076bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -35,12 +35,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function 
{:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { @@ -53,12 +53,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2076bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2077bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2078bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2079bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2076bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69000bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1920bv64); free ensures (memory_load64_le(mem, 69616bv64) == 2004bv64); @@ -66,8 +63,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -82,18 +79,12 @@ procedure {:extern} guarantee_reflexive(); procedure #free(); modifies Gamma_R16, Gamma_R17, R16, R17; - free requires (memory_load8_le(mem, 2076bv64) == 1bv8); - free requires (memory_load8_le(mem, 2077bv64) == 0bv8); - free requires (memory_load8_le(mem, 2078bv64) == 2bv8); - free requires (memory_load8_le(mem, 2079bv64) == 0bv8); + free requires (memory_load32_le(mem, 2076bv64) == 131073bv32); free requires (memory_load64_le(mem, 69000bv64) == 2000bv64); free requires (memory_load64_le(mem, 69008bv64) == 1920bv64); free requires (memory_load64_le(mem, 69616bv64) == 2004bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 2076bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2077bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2078bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2079bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2076bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69000bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1920bv64); free ensures (memory_load64_le(mem, 69616bv64) == 2004bv64); @@ -103,10 +94,7 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R16, Gamma_R17, Gamma_R29, Gamma_R30, Gamma_R31, Gamma_mem, Gamma_stack, R0, R1, R16, R17, R29, R30, R31, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 2076bv64) == 1bv8); - free requires (memory_load8_le(mem, 2077bv64) == 0bv8); - free requires (memory_load8_le(mem, 2078bv64) == 2bv8); - free requires (memory_load8_le(mem, 2079bv64) == 0bv8); + free requires (memory_load32_le(mem, 2076bv64) == 131073bv32); free requires (memory_load64_le(mem, 69000bv64) == 2000bv64); free requires (memory_load64_le(mem, 69008bv64) == 1920bv64); free requires (memory_load64_le(mem, 69616bv64) == 2004bv64); @@ -115,10 +103,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 2076bv64) 
== 1bv8); - free ensures (memory_load8_le(mem, 2077bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2078bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2079bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2076bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69000bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1920bv64); free ensures (memory_load64_le(mem, 69616bv64) == 2004bv64); @@ -168,18 +153,12 @@ implementation main() procedure malloc(); modifies Gamma_R16, Gamma_R17, R16, R17; - free requires (memory_load8_le(mem, 2076bv64) == 1bv8); - free requires (memory_load8_le(mem, 2077bv64) == 0bv8); - free requires (memory_load8_le(mem, 2078bv64) == 2bv8); - free requires (memory_load8_le(mem, 2079bv64) == 0bv8); + free requires (memory_load32_le(mem, 2076bv64) == 131073bv32); free requires (memory_load64_le(mem, 69000bv64) == 2000bv64); free requires (memory_load64_le(mem, 69008bv64) == 1920bv64); free requires (memory_load64_le(mem, 69616bv64) == 2004bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 2076bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2077bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2078bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2079bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2076bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69000bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1920bv64); free ensures (memory_load64_le(mem, 69616bv64) == 2004bv64); diff --git a/src/test/correct/basicfree/gcc/basicfree_gtirb.expected b/src/test/correct/basicfree/gcc/basicfree_gtirb.expected index 9b75b6fd3..e9dfa741d 100644 --- a/src/test/correct/basicfree/gcc/basicfree_gtirb.expected +++ b/src/test/correct/basicfree/gcc/basicfree_gtirb.expected @@ -18,7 +18,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2076bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -35,12 +35,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} 
memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { @@ -53,12 +53,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2076bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2077bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2078bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2079bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2076bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69000bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1920bv64); free ensures (memory_load64_le(mem, 69616bv64) == 2004bv64); @@ -66,8 +63,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -82,18 +79,12 @@ procedure {:extern} guarantee_reflexive(); procedure FUN_680(); modifies Gamma_R16, Gamma_R17, Gamma_mem, R16, R17, mem; - free requires (memory_load8_le(mem, 2076bv64) == 1bv8); - free requires (memory_load8_le(mem, 2077bv64) == 0bv8); - free requires (memory_load8_le(mem, 2078bv64) == 2bv8); - free requires (memory_load8_le(mem, 2079bv64) == 0bv8); + free requires (memory_load32_le(mem, 2076bv64) == 131073bv32); free requires (memory_load64_le(mem, 69000bv64) == 2000bv64); free requires (memory_load64_le(mem, 69008bv64) == 1920bv64); free requires (memory_load64_le(mem, 69616bv64) == 2004bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 2076bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2077bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2078bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2079bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2076bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69000bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1920bv64); free ensures (memory_load64_le(mem, 69616bv64) == 2004bv64); @@ -115,10 +106,7 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R16, Gamma_R17, Gamma_R29, Gamma_R30, Gamma_R31, Gamma_mem, Gamma_stack, R0, R1, R16, R17, R29, R30, R31, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 2076bv64) == 1bv8); - free requires (memory_load8_le(mem, 2077bv64) == 0bv8); - free requires (memory_load8_le(mem, 2078bv64) == 2bv8); - free requires (memory_load8_le(mem, 2079bv64) == 0bv8); + free requires (memory_load32_le(mem, 2076bv64) == 131073bv32); free requires (memory_load64_le(mem, 69000bv64) == 2000bv64); free requires (memory_load64_le(mem, 69008bv64) == 1920bv64); free requires (memory_load64_le(mem, 69616bv64) == 2004bv64); @@ -127,10 +115,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 2076bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2077bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2078bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2079bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2076bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69000bv64) == 2000bv64); free ensures 
(memory_load64_le(mem, 69008bv64) == 1920bv64); free ensures (memory_load64_le(mem, 69616bv64) == 2004bv64); @@ -180,18 +165,12 @@ implementation main() procedure FUN_650(); modifies Gamma_R16, Gamma_R17, Gamma_mem, R16, R17, mem; - free requires (memory_load8_le(mem, 2076bv64) == 1bv8); - free requires (memory_load8_le(mem, 2077bv64) == 0bv8); - free requires (memory_load8_le(mem, 2078bv64) == 2bv8); - free requires (memory_load8_le(mem, 2079bv64) == 0bv8); + free requires (memory_load32_le(mem, 2076bv64) == 131073bv32); free requires (memory_load64_le(mem, 69000bv64) == 2000bv64); free requires (memory_load64_le(mem, 69008bv64) == 1920bv64); free requires (memory_load64_le(mem, 69616bv64) == 2004bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 2076bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2077bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2078bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2079bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2076bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69000bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1920bv64); free ensures (memory_load64_le(mem, 69616bv64) == 2004bv64); @@ -210,36 +189,24 @@ implementation FUN_650() } procedure #free(); - free requires (memory_load8_le(mem, 2076bv64) == 1bv8); - free requires (memory_load8_le(mem, 2077bv64) == 0bv8); - free requires (memory_load8_le(mem, 2078bv64) == 2bv8); - free requires (memory_load8_le(mem, 2079bv64) == 0bv8); + free requires (memory_load32_le(mem, 2076bv64) == 131073bv32); free requires (memory_load64_le(mem, 69000bv64) == 2000bv64); free requires (memory_load64_le(mem, 69008bv64) == 1920bv64); free requires (memory_load64_le(mem, 69616bv64) == 2004bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 2076bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2077bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2078bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2079bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2076bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69000bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1920bv64); free ensures (memory_load64_le(mem, 69616bv64) == 2004bv64); free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure malloc(); - free requires (memory_load8_le(mem, 2076bv64) == 1bv8); - free requires (memory_load8_le(mem, 2077bv64) == 0bv8); - free requires (memory_load8_le(mem, 2078bv64) == 2bv8); - free requires (memory_load8_le(mem, 2079bv64) == 0bv8); + free requires (memory_load32_le(mem, 2076bv64) == 131073bv32); free requires (memory_load64_le(mem, 69000bv64) == 2000bv64); free requires (memory_load64_le(mem, 69008bv64) == 1920bv64); free requires (memory_load64_le(mem, 69616bv64) == 2004bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 2076bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2077bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2078bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2079bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2076bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69000bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1920bv64); free ensures (memory_load64_le(mem, 69616bv64) == 2004bv64); diff --git a/src/test/correct/cjump/clang/cjump.expected b/src/test/correct/cjump/clang/cjump.expected index 
bdc139149..e74e0ba9c 100644 --- a/src/test/correct/cjump/clang/cjump.expected +++ b/src/test/correct/cjump/clang/cjump.expected @@ -22,7 +22,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -49,10 +49,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -62,12 +58,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -75,8 +68,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -95,20 +88,14 @@ procedure main(); requires (gamma_load32(Gamma_mem, $y_addr) == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1916bv64) == 1bv8); - free requires (memory_load8_le(mem, 1917bv64) == 0bv8); - free requires (memory_load8_le(mem, 1918bv64) == 2bv8); - free requires (memory_load8_le(mem, 1919bv64) == 0bv8); + free requires (memory_load32_le(mem, 1916bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git 
a/src/test/correct/cjump/clang/cjump_gtirb.expected b/src/test/correct/cjump/clang/cjump_gtirb.expected index 46eff7f81..db365724f 100644 --- a/src/test/correct/cjump/clang/cjump_gtirb.expected +++ b/src/test/correct/cjump/clang/cjump_gtirb.expected @@ -22,7 +22,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -48,10 +48,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -60,12 +56,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -73,8 +66,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -93,20 +86,14 @@ procedure main(); requires (gamma_load32(Gamma_mem, $y_addr) == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1916bv64) == 1bv8); - free requires (memory_load8_le(mem, 1917bv64) == 0bv8); - free requires (memory_load8_le(mem, 1918bv64) == 2bv8); - free requires (memory_load8_le(mem, 1919bv64) == 0bv8); + free requires (memory_load32_le(mem, 1916bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 
69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/cjump/clang_pic/cjump.expected b/src/test/correct/cjump/clang_pic/cjump.expected index 57808849f..b51a7b7e0 100644 --- a/src/test/correct/cjump/clang_pic/cjump.expected +++ b/src/test/correct/cjump/clang_pic/cjump.expected @@ -22,7 +22,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -53,10 +53,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -66,23 +62,20 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1992bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1993bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1994bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1995bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1992bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -101,28 +94,22 @@ procedure main(); requires (gamma_load32(Gamma_mem, $y_addr) == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1992bv64) == 1bv8); - free requires (memory_load8_le(mem, 1993bv64) == 0bv8); - free requires (memory_load8_le(mem, 1994bv64) == 2bv8); - free requires (memory_load8_le(mem, 1995bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); + free requires (memory_load32_le(mem, 1992bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 
69568bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1992bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1993bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1994bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1995bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); + free ensures (memory_load32_le(mem, 1992bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/cjump/clang_pic/cjump_gtirb.expected b/src/test/correct/cjump/clang_pic/cjump_gtirb.expected index efb0e4ac5..9d0935d98 100644 --- a/src/test/correct/cjump/clang_pic/cjump_gtirb.expected +++ b/src/test/correct/cjump/clang_pic/cjump_gtirb.expected @@ -22,7 +22,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -52,10 +52,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -64,23 +60,20 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1992bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1993bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1994bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1995bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1992bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures 
(memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -99,28 +92,22 @@ procedure main(); requires (gamma_load32(Gamma_mem, $y_addr) == false); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1992bv64) == 1bv8); - free requires (memory_load8_le(mem, 1993bv64) == 0bv8); - free requires (memory_load8_le(mem, 1994bv64) == 2bv8); - free requires (memory_load8_le(mem, 1995bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); + free requires (memory_load32_le(mem, 1992bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1992bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1993bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1994bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1995bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); + free ensures (memory_load32_le(mem, 1992bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/cjump/gcc/cjump.expected b/src/test/correct/cjump/gcc/cjump.expected index 870950bb8..136b5251d 100644 --- a/src/test/correct/cjump/gcc/cjump.expected +++ b/src/test/correct/cjump/gcc/cjump.expected @@ -16,7 +16,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -43,10 +43,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := 
value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -56,12 +52,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1912bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1913bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1914bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1915bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1912bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -69,8 +62,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -89,18 +82,12 @@ procedure main(); requires (gamma_load32(Gamma_mem, $y_addr) == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1912bv64) == 1bv8); - free requires (memory_load8_le(mem, 1913bv64) == 0bv8); - free requires (memory_load8_le(mem, 1914bv64) == 2bv8); - free requires (memory_load8_le(mem, 1915bv64) == 0bv8); + free requires (memory_load32_le(mem, 1912bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1912bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1913bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1914bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1915bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1912bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/cjump/gcc/cjump_gtirb.expected b/src/test/correct/cjump/gcc/cjump_gtirb.expected index 6a1713dfe..35ffd3da4 100644 --- a/src/test/correct/cjump/gcc/cjump_gtirb.expected +++ b/src/test/correct/cjump/gcc/cjump_gtirb.expected @@ -16,7 +16,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -42,10 +42,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { 
memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -54,12 +50,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1912bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1913bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1914bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1915bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1912bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -67,8 +60,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -87,18 +80,12 @@ procedure main(); requires (gamma_load32(Gamma_mem, $y_addr) == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1912bv64) == 1bv8); - free requires (memory_load8_le(mem, 1913bv64) == 0bv8); - free requires (memory_load8_le(mem, 1914bv64) == 2bv8); - free requires (memory_load8_le(mem, 1915bv64) == 0bv8); + free requires (memory_load32_le(mem, 1912bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1912bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1913bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1914bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1915bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1912bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/cjump/gcc_pic/cjump.expected b/src/test/correct/cjump/gcc_pic/cjump.expected index 1ea46d0bc..0162d3f40 100644 --- a/src/test/correct/cjump/gcc_pic/cjump.expected +++ b/src/test/correct/cjump/gcc_pic/cjump.expected @@ -16,7 +16,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -47,10 +47,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { 
memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -60,23 +56,20 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1976bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1977bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1978bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1979bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1976bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -95,26 +88,20 @@ procedure main(); requires (gamma_load32(Gamma_mem, $y_addr) == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1976bv64) == 1bv8); - free requires (memory_load8_le(mem, 1977bv64) == 0bv8); - free requires (memory_load8_le(mem, 1978bv64) == 2bv8); - free requires (memory_load8_le(mem, 1979bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1976bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); - free ensures (memory_load8_le(mem, 1976bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1977bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1978bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1979bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); + free ensures (memory_load32_le(mem, 1976bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 
69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/cjump/gcc_pic/cjump_gtirb.expected b/src/test/correct/cjump/gcc_pic/cjump_gtirb.expected index 7f8c66615..0e9fdc0b6 100644 --- a/src/test/correct/cjump/gcc_pic/cjump_gtirb.expected +++ b/src/test/correct/cjump/gcc_pic/cjump_gtirb.expected @@ -16,7 +16,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -46,10 +46,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -58,23 +54,20 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1976bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1977bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1978bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1979bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1976bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -93,26 +86,20 @@ procedure main(); requires (gamma_load32(Gamma_mem, $y_addr) == false); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1976bv64) == 1bv8); - free requires (memory_load8_le(mem, 1977bv64) == 0bv8); - free requires (memory_load8_le(mem, 1978bv64) == 2bv8); - free requires (memory_load8_le(mem, 1979bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free 
requires (memory_load32_le(mem, 1976bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); - free ensures (memory_load8_le(mem, 1976bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1977bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1978bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1979bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); + free ensures (memory_load32_le(mem, 1976bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/floatingpoint/clang/floatingpoint_gtirb.expected b/src/test/correct/floatingpoint/clang/floatingpoint_gtirb.expected index eb7ad317b..376b609a9 100644 --- a/src/test/correct/floatingpoint/clang/floatingpoint_gtirb.expected +++ b/src/test/correct/floatingpoint/clang/floatingpoint_gtirb.expected @@ -29,7 +29,7 @@ function FPMul$64(bv64, bv64, bv32) returns (bv64); function FPSub$32(bv32, bv32, bv32) returns (bv32); function FPSub$64(bv64, bv64, bv32) returns (bv64); function FPToFixed$32$64(bv64, int, bool, bv32, int) returns (bv32); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -72,8 +72,8 @@ function {:extern} {:bvbuiltin "zero_extend 64"} zero_extend64_64(bv64) returns function {:extern} {:bvbuiltin "zero_extend 96"} zero_extend96_32(bv32) returns (bv128); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2088bv64) == 131073bv64); free ensures (memory_load64_le(mem, 2096bv64) == 4767034467667331754bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); @@ -83,8 +83,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -171,25 +171,25 @@ implementation main() assume {:captureState "1856$0"} true; V0, Gamma_V0 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 44bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 44bv64)); V1, Gamma_V1 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 40bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 40bv64)); - Exp9__5$0$14, Gamma_Exp9__5$0$14 := FPMul$32(V0[32:0], V1[32:0], FPCR), (Gamma_FPCR && (Gamma_V1 && Gamma_V0)); + Exp9__5$0$14, Gamma_Exp9__5$0$14 := FPMul$32(V0[32:0], V1[32:0], FPCR), true; V0, Gamma_V0 := zero_extend96_32(Exp9__5$0$14), 
Gamma_Exp9__5$0$14; stack, Gamma_stack := memory_store32_le(stack, bvadd64(R31, 36bv64), V0[32:0]), gamma_store32(Gamma_stack, bvadd64(R31, 36bv64), Gamma_V0); assume {:captureState "1872$0"} true; V0, Gamma_V0 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 44bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 44bv64)); V1, Gamma_V1 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 40bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 40bv64)); - Exp9__5$0$18, Gamma_Exp9__5$0$18 := FPDiv$32(V0[32:0], V1[32:0], FPCR), (Gamma_FPCR && (Gamma_V1 && Gamma_V0)); + Exp9__5$0$18, Gamma_Exp9__5$0$18 := FPDiv$32(V0[32:0], V1[32:0], FPCR), true; V0, Gamma_V0 := zero_extend96_32(Exp9__5$0$18), Gamma_Exp9__5$0$18; stack, Gamma_stack := memory_store32_le(stack, bvadd64(R31, 36bv64), V0[32:0]), gamma_store32(Gamma_stack, bvadd64(R31, 36bv64), Gamma_V0); assume {:captureState "1888$0"} true; V0, Gamma_V0 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 44bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 44bv64)); V1, Gamma_V1 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 40bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 40bv64)); - Exp9__5$0$22, Gamma_Exp9__5$0$22 := FPSub$32(V0[32:0], V1[32:0], FPCR), (Gamma_FPCR && (Gamma_V1 && Gamma_V0)); + Exp9__5$0$22, Gamma_Exp9__5$0$22 := FPSub$32(V0[32:0], V1[32:0], FPCR), true; V0, Gamma_V0 := zero_extend96_32(Exp9__5$0$22), Gamma_Exp9__5$0$22; stack, Gamma_stack := memory_store32_le(stack, bvadd64(R31, 36bv64), V0[32:0]), gamma_store32(Gamma_stack, bvadd64(R31, 36bv64), Gamma_V0); assume {:captureState "1904$0"} true; V0, Gamma_V0 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 44bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 44bv64)); V1, Gamma_V1 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 40bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 40bv64)); - Exp9__5$0$26, Gamma_Exp9__5$0$26 := FPAdd$32(V0[32:0], V1[32:0], FPCR), (Gamma_FPCR && (Gamma_V1 && Gamma_V0)); + Exp9__5$0$26, Gamma_Exp9__5$0$26 := FPAdd$32(V0[32:0], V1[32:0], FPCR), true; V0, Gamma_V0 := zero_extend96_32(Exp9__5$0$26), Gamma_Exp9__5$0$26; stack, Gamma_stack := memory_store32_le(stack, bvadd64(R31, 36bv64), V0[32:0]), gamma_store32(Gamma_stack, bvadd64(R31, 36bv64), Gamma_V0); assume {:captureState "1920$0"} true; @@ -201,53 +201,53 @@ implementation main() V0, Gamma_V0 := zero_extend64_64(memory_load64_le(stack, bvadd64(R31, 24bv64))), gamma_load64(Gamma_stack, bvadd64(R31, 24bv64)); V1, Gamma_V1 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 40bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 40bv64)); FPDecodeRounding8__7, Gamma_FPDecodeRounding8__7 := zero_extend1_2(FPCR[24:22]), Gamma_FPCR; - Exp10__6$0$33, Gamma_Exp10__6$0$33 := FPConvert$64$32(V1[32:0], FPCR), (Gamma_FPCR && Gamma_V1); + Exp10__6$0$33, Gamma_Exp10__6$0$33 := FPConvert$64$32(V1[32:0], FPCR), true; V1, Gamma_V1 := zero_extend64_64(Exp10__6$0$33), Gamma_Exp10__6$0$33; - Exp9__5$0$34, Gamma_Exp9__5$0$34 := FPMul$64(V0[64:0], V1[64:0], FPCR), (Gamma_FPCR && (Gamma_V1 && Gamma_V0)); + Exp9__5$0$34, Gamma_Exp9__5$0$34 := FPMul$64(V0[64:0], V1[64:0], FPCR), true; V0, Gamma_V0 := zero_extend64_64(Exp9__5$0$34), Gamma_Exp9__5$0$34; stack, Gamma_stack := memory_store64_le(stack, bvadd64(R31, 16bv64), V0[64:0]), gamma_store64(Gamma_stack, bvadd64(R31, 16bv64), Gamma_V0); assume {:captureState "1952$0"} true; V0, Gamma_V0 := zero_extend64_64(memory_load64_le(stack, bvadd64(R31, 24bv64))), gamma_load64(Gamma_stack, bvadd64(R31, 24bv64)); V1, Gamma_V1 := 
zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 40bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 40bv64)); FPDecodeRounding8__7, Gamma_FPDecodeRounding8__7 := zero_extend1_2(FPCR[24:22]), Gamma_FPCR; - Exp10__6$0$38, Gamma_Exp10__6$0$38 := FPConvert$64$32(V1[32:0], FPCR), (Gamma_FPCR && Gamma_V1); + Exp10__6$0$38, Gamma_Exp10__6$0$38 := FPConvert$64$32(V1[32:0], FPCR), true; V1, Gamma_V1 := zero_extend64_64(Exp10__6$0$38), Gamma_Exp10__6$0$38; - Exp9__5$0$39, Gamma_Exp9__5$0$39 := FPSub$64(V0[64:0], V1[64:0], FPCR), (Gamma_FPCR && (Gamma_V1 && Gamma_V0)); + Exp9__5$0$39, Gamma_Exp9__5$0$39 := FPSub$64(V0[64:0], V1[64:0], FPCR), true; V0, Gamma_V0 := zero_extend64_64(Exp9__5$0$39), Gamma_Exp9__5$0$39; stack, Gamma_stack := memory_store64_le(stack, bvadd64(R31, 16bv64), V0[64:0]), gamma_store64(Gamma_stack, bvadd64(R31, 16bv64), Gamma_V0); assume {:captureState "1972$0"} true; V0, Gamma_V0 := zero_extend64_64(memory_load64_le(stack, bvadd64(R31, 24bv64))), gamma_load64(Gamma_stack, bvadd64(R31, 24bv64)); V1, Gamma_V1 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 40bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 40bv64)); FPDecodeRounding8__7, Gamma_FPDecodeRounding8__7 := zero_extend1_2(FPCR[24:22]), Gamma_FPCR; - Exp10__6$0$43, Gamma_Exp10__6$0$43 := FPConvert$64$32(V1[32:0], FPCR), (Gamma_FPCR && Gamma_V1); + Exp10__6$0$43, Gamma_Exp10__6$0$43 := FPConvert$64$32(V1[32:0], FPCR), true; V1, Gamma_V1 := zero_extend64_64(Exp10__6$0$43), Gamma_Exp10__6$0$43; - Exp9__5$0$44, Gamma_Exp9__5$0$44 := FPAdd$64(V0[64:0], V1[64:0], FPCR), (Gamma_FPCR && (Gamma_V1 && Gamma_V0)); + Exp9__5$0$44, Gamma_Exp9__5$0$44 := FPAdd$64(V0[64:0], V1[64:0], FPCR), true; V0, Gamma_V0 := zero_extend64_64(Exp9__5$0$44), Gamma_Exp9__5$0$44; stack, Gamma_stack := memory_store64_le(stack, bvadd64(R31, 16bv64), V0[64:0]), gamma_store64(Gamma_stack, bvadd64(R31, 16bv64), Gamma_V0); assume {:captureState "1992$0"} true; V0, Gamma_V0 := zero_extend64_64(memory_load64_le(stack, bvadd64(R31, 24bv64))), gamma_load64(Gamma_stack, bvadd64(R31, 24bv64)); V1, Gamma_V1 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 40bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 40bv64)); FPDecodeRounding8__7, Gamma_FPDecodeRounding8__7 := zero_extend1_2(FPCR[24:22]), Gamma_FPCR; - Exp10__6$0$48, Gamma_Exp10__6$0$48 := FPConvert$64$32(V1[32:0], FPCR), (Gamma_FPCR && Gamma_V1); + Exp10__6$0$48, Gamma_Exp10__6$0$48 := FPConvert$64$32(V1[32:0], FPCR), true; V1, Gamma_V1 := zero_extend64_64(Exp10__6$0$48), Gamma_Exp10__6$0$48; - Exp9__5$0$49, Gamma_Exp9__5$0$49 := FPDiv$64(V0[64:0], V1[64:0], FPCR), (Gamma_FPCR && (Gamma_V1 && Gamma_V0)); + Exp9__5$0$49, Gamma_Exp9__5$0$49 := FPDiv$64(V0[64:0], V1[64:0], FPCR), true; V0, Gamma_V0 := zero_extend64_64(Exp9__5$0$49), Gamma_Exp9__5$0$49; stack, Gamma_stack := memory_store64_le(stack, bvadd64(R31, 16bv64), V0[64:0]), gamma_store64(Gamma_stack, bvadd64(R31, 16bv64), Gamma_V0); assume {:captureState "2012$0"} true; V0, Gamma_V0 := zero_extend64_64(memory_load64_le(stack, bvadd64(R31, 16bv64))), gamma_load64(Gamma_stack, bvadd64(R31, 16bv64)); FPDecodeRounding8__7, Gamma_FPDecodeRounding8__7 := zero_extend1_2(FPCR[24:22]), Gamma_FPCR; - Exp10__6$0$52, Gamma_Exp10__6$0$52 := FPConvert$32$64(V0[64:0], FPCR), (Gamma_FPCR && Gamma_V0); + Exp10__6$0$52, Gamma_Exp10__6$0$52 := FPConvert$32$64(V0[64:0], FPCR), true; V0, Gamma_V0 := zero_extend96_32(Exp10__6$0$52), Gamma_Exp10__6$0$52; stack, Gamma_stack := memory_store32_le(stack, bvadd64(R31, 44bv64), V0[32:0]), gamma_store32(Gamma_stack, 
bvadd64(R31, 44bv64), Gamma_V0); assume {:captureState "2024$0"} true; V0, Gamma_V0 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 40bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 40bv64)); FPDecodeRounding8__7, Gamma_FPDecodeRounding8__7 := zero_extend1_2(FPCR[24:22]), Gamma_FPCR; - Exp10__6$0$55, Gamma_Exp10__6$0$55 := FPConvert$64$32(V0[32:0], FPCR), (Gamma_FPCR && Gamma_V0); + Exp10__6$0$55, Gamma_Exp10__6$0$55 := FPConvert$64$32(V0[32:0], FPCR), true; V0, Gamma_V0 := zero_extend64_64(Exp10__6$0$55), Gamma_Exp10__6$0$55; stack, Gamma_stack := memory_store64_le(stack, bvadd64(R31, 24bv64), V0[64:0]), gamma_store64(Gamma_stack, bvadd64(R31, 24bv64), Gamma_V0); assume {:captureState "2036$0"} true; V0, Gamma_V0 := zero_extend64_64(memory_load64_le(stack, bvadd64(R31, 16bv64))), gamma_load64(Gamma_stack, bvadd64(R31, 16bv64)); - Exp7__5$0$58, Gamma_Exp7__5$0$58 := FPToFixed$32$64(V0[64:0], 0, false, FPCR, 3), (Gamma_FPCR && Gamma_V0); + Exp7__5$0$58, Gamma_Exp7__5$0$58 := FPToFixed$32$64(V0[64:0], 0, false, FPCR, 3), true; R8, Gamma_R8 := zero_extend32_32(Exp7__5$0$58), Gamma_Exp7__5$0$58; stack, Gamma_stack := memory_store32_le(stack, bvadd64(R31, 12bv64), R8[32:0]), gamma_store32(Gamma_stack, bvadd64(R31, 12bv64), Gamma_R8); assume {:captureState "2048$0"} true; diff --git a/src/test/correct/floatingpoint/gcc/floatingpoint_gtirb.expected b/src/test/correct/floatingpoint/gcc/floatingpoint_gtirb.expected index 065a70ac3..72d704dc3 100644 --- a/src/test/correct/floatingpoint/gcc/floatingpoint_gtirb.expected +++ b/src/test/correct/floatingpoint/gcc/floatingpoint_gtirb.expected @@ -27,7 +27,7 @@ function FPMul$64(bv64, bv64, bv32) returns (bv64); function FPSub$32(bv32, bv32, bv32) returns (bv32); function FPSub$64(bv64, bv64, bv32) returns (bv64); function FPToFixed$32$64(bv64, int, bool, bv32, int) returns (bv32); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -70,8 +70,8 @@ function {:extern} {:bvbuiltin "zero_extend 64"} zero_extend64_64(bv64) returns function {:extern} {:bvbuiltin "zero_extend 96"} zero_extend96_32(bv32) returns (bv128); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2080bv64) == 131073bv64); free ensures (memory_load64_le(mem, 2088bv64) == 4767034467667331754bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); @@ -81,8 +81,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -167,25 +167,25 @@ implementation main() assume {:captureState "1852$0"} true; V1, Gamma_V1 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 16bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 16bv64)); V0, Gamma_V0 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 20bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 20bv64)); - Exp9__5$0$13, Gamma_Exp9__5$0$13 := FPMul$32(V1[32:0], V0[32:0], FPCR), (Gamma_FPCR && (Gamma_V0 && Gamma_V1)); + Exp9__5$0$13, Gamma_Exp9__5$0$13 := FPMul$32(V1[32:0], V0[32:0], FPCR), true; V0, Gamma_V0 := zero_extend96_32(Exp9__5$0$13), Gamma_Exp9__5$0$13; stack, Gamma_stack := memory_store32_le(stack, bvadd64(R31, 24bv64), V0[32:0]), gamma_store32(Gamma_stack, bvadd64(R31, 24bv64), Gamma_V0); assume 
{:captureState "1868$0"} true; V0, Gamma_V0 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 20bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 20bv64)); V1, Gamma_V1 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 16bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 16bv64)); - Exp9__5$0$17, Gamma_Exp9__5$0$17 := FPDiv$32(V1[32:0], V0[32:0], FPCR), (Gamma_FPCR && (Gamma_V0 && Gamma_V1)); + Exp9__5$0$17, Gamma_Exp9__5$0$17 := FPDiv$32(V1[32:0], V0[32:0], FPCR), true; V0, Gamma_V0 := zero_extend96_32(Exp9__5$0$17), Gamma_Exp9__5$0$17; stack, Gamma_stack := memory_store32_le(stack, bvadd64(R31, 24bv64), V0[32:0]), gamma_store32(Gamma_stack, bvadd64(R31, 24bv64), Gamma_V0); assume {:captureState "1884$0"} true; V1, Gamma_V1 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 16bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 16bv64)); V0, Gamma_V0 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 20bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 20bv64)); - Exp9__5$0$21, Gamma_Exp9__5$0$21 := FPSub$32(V1[32:0], V0[32:0], FPCR), (Gamma_FPCR && (Gamma_V0 && Gamma_V1)); + Exp9__5$0$21, Gamma_Exp9__5$0$21 := FPSub$32(V1[32:0], V0[32:0], FPCR), true; V0, Gamma_V0 := zero_extend96_32(Exp9__5$0$21), Gamma_Exp9__5$0$21; stack, Gamma_stack := memory_store32_le(stack, bvadd64(R31, 24bv64), V0[32:0]), gamma_store32(Gamma_stack, bvadd64(R31, 24bv64), Gamma_V0); assume {:captureState "1900$0"} true; V1, Gamma_V1 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 16bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 16bv64)); V0, Gamma_V0 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 20bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 20bv64)); - Exp9__5$0$25, Gamma_Exp9__5$0$25 := FPAdd$32(V1[32:0], V0[32:0], FPCR), (Gamma_FPCR && (Gamma_V0 && Gamma_V1)); + Exp9__5$0$25, Gamma_Exp9__5$0$25 := FPAdd$32(V1[32:0], V0[32:0], FPCR), true; V0, Gamma_V0 := zero_extend96_32(Exp9__5$0$25), Gamma_Exp9__5$0$25; stack, Gamma_stack := memory_store32_le(stack, bvadd64(R31, 24bv64), V0[32:0]), gamma_store32(Gamma_stack, bvadd64(R31, 24bv64), Gamma_V0); assume {:captureState "1916$0"} true; @@ -196,54 +196,54 @@ implementation main() assume {:captureState "1928$0"} true; V0, Gamma_V0 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 20bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 20bv64)); FPDecodeRounding8__7, Gamma_FPDecodeRounding8__7 := zero_extend1_2(FPCR[24:22]), Gamma_FPCR; - Exp10__6$0$31, Gamma_Exp10__6$0$31 := FPConvert$64$32(V0[32:0], FPCR), (Gamma_FPCR && Gamma_V0); + Exp10__6$0$31, Gamma_Exp10__6$0$31 := FPConvert$64$32(V0[32:0], FPCR), true; V0, Gamma_V0 := zero_extend64_64(Exp10__6$0$31), Gamma_Exp10__6$0$31; V1, Gamma_V1 := zero_extend64_64(memory_load64_le(stack, bvadd64(R31, 32bv64))), gamma_load64(Gamma_stack, bvadd64(R31, 32bv64)); - Exp9__5$0$33, Gamma_Exp9__5$0$33 := FPMul$64(V1[64:0], V0[64:0], FPCR), (Gamma_FPCR && (Gamma_V0 && Gamma_V1)); + Exp9__5$0$33, Gamma_Exp9__5$0$33 := FPMul$64(V1[64:0], V0[64:0], FPCR), true; V0, Gamma_V0 := zero_extend64_64(Exp9__5$0$33), Gamma_Exp9__5$0$33; stack, Gamma_stack := memory_store64_le(stack, bvadd64(R31, 40bv64), V0[64:0]), gamma_store64(Gamma_stack, bvadd64(R31, 40bv64), Gamma_V0); assume {:captureState "1948$0"} true; V0, Gamma_V0 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 20bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 20bv64)); FPDecodeRounding8__7, Gamma_FPDecodeRounding8__7 := zero_extend1_2(FPCR[24:22]), Gamma_FPCR; - Exp10__6$0$36, Gamma_Exp10__6$0$36 := FPConvert$64$32(V0[32:0], FPCR), 
(Gamma_FPCR && Gamma_V0); + Exp10__6$0$36, Gamma_Exp10__6$0$36 := FPConvert$64$32(V0[32:0], FPCR), true; V0, Gamma_V0 := zero_extend64_64(Exp10__6$0$36), Gamma_Exp10__6$0$36; V1, Gamma_V1 := zero_extend64_64(memory_load64_le(stack, bvadd64(R31, 32bv64))), gamma_load64(Gamma_stack, bvadd64(R31, 32bv64)); - Exp9__5$0$38, Gamma_Exp9__5$0$38 := FPSub$64(V1[64:0], V0[64:0], FPCR), (Gamma_FPCR && (Gamma_V0 && Gamma_V1)); + Exp9__5$0$38, Gamma_Exp9__5$0$38 := FPSub$64(V1[64:0], V0[64:0], FPCR), true; V0, Gamma_V0 := zero_extend64_64(Exp9__5$0$38), Gamma_Exp9__5$0$38; stack, Gamma_stack := memory_store64_le(stack, bvadd64(R31, 40bv64), V0[64:0]), gamma_store64(Gamma_stack, bvadd64(R31, 40bv64), Gamma_V0); assume {:captureState "1968$0"} true; V0, Gamma_V0 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 20bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 20bv64)); FPDecodeRounding8__7, Gamma_FPDecodeRounding8__7 := zero_extend1_2(FPCR[24:22]), Gamma_FPCR; - Exp10__6$0$41, Gamma_Exp10__6$0$41 := FPConvert$64$32(V0[32:0], FPCR), (Gamma_FPCR && Gamma_V0); + Exp10__6$0$41, Gamma_Exp10__6$0$41 := FPConvert$64$32(V0[32:0], FPCR), true; V0, Gamma_V0 := zero_extend64_64(Exp10__6$0$41), Gamma_Exp10__6$0$41; V1, Gamma_V1 := zero_extend64_64(memory_load64_le(stack, bvadd64(R31, 32bv64))), gamma_load64(Gamma_stack, bvadd64(R31, 32bv64)); - Exp9__5$0$43, Gamma_Exp9__5$0$43 := FPAdd$64(V1[64:0], V0[64:0], FPCR), (Gamma_FPCR && (Gamma_V0 && Gamma_V1)); + Exp9__5$0$43, Gamma_Exp9__5$0$43 := FPAdd$64(V1[64:0], V0[64:0], FPCR), true; V0, Gamma_V0 := zero_extend64_64(Exp9__5$0$43), Gamma_Exp9__5$0$43; stack, Gamma_stack := memory_store64_le(stack, bvadd64(R31, 40bv64), V0[64:0]), gamma_store64(Gamma_stack, bvadd64(R31, 40bv64), Gamma_V0); assume {:captureState "1988$0"} true; V0, Gamma_V0 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 20bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 20bv64)); FPDecodeRounding8__7, Gamma_FPDecodeRounding8__7 := zero_extend1_2(FPCR[24:22]), Gamma_FPCR; - Exp10__6$0$46, Gamma_Exp10__6$0$46 := FPConvert$64$32(V0[32:0], FPCR), (Gamma_FPCR && Gamma_V0); + Exp10__6$0$46, Gamma_Exp10__6$0$46 := FPConvert$64$32(V0[32:0], FPCR), true; V0, Gamma_V0 := zero_extend64_64(Exp10__6$0$46), Gamma_Exp10__6$0$46; V1, Gamma_V1 := zero_extend64_64(memory_load64_le(stack, bvadd64(R31, 32bv64))), gamma_load64(Gamma_stack, bvadd64(R31, 32bv64)); - Exp9__5$0$48, Gamma_Exp9__5$0$48 := FPDiv$64(V1[64:0], V0[64:0], FPCR), (Gamma_FPCR && (Gamma_V0 && Gamma_V1)); + Exp9__5$0$48, Gamma_Exp9__5$0$48 := FPDiv$64(V1[64:0], V0[64:0], FPCR), true; V0, Gamma_V0 := zero_extend64_64(Exp9__5$0$48), Gamma_Exp9__5$0$48; stack, Gamma_stack := memory_store64_le(stack, bvadd64(R31, 40bv64), V0[64:0]), gamma_store64(Gamma_stack, bvadd64(R31, 40bv64), Gamma_V0); assume {:captureState "2008$0"} true; V0, Gamma_V0 := zero_extend64_64(memory_load64_le(stack, bvadd64(R31, 40bv64))), gamma_load64(Gamma_stack, bvadd64(R31, 40bv64)); FPDecodeRounding8__7, Gamma_FPDecodeRounding8__7 := zero_extend1_2(FPCR[24:22]), Gamma_FPCR; - Exp10__6$0$51, Gamma_Exp10__6$0$51 := FPConvert$32$64(V0[64:0], FPCR), (Gamma_FPCR && Gamma_V0); + Exp10__6$0$51, Gamma_Exp10__6$0$51 := FPConvert$32$64(V0[64:0], FPCR), true; V0, Gamma_V0 := zero_extend96_32(Exp10__6$0$51), Gamma_Exp10__6$0$51; stack, Gamma_stack := memory_store32_le(stack, bvadd64(R31, 16bv64), V0[32:0]), gamma_store32(Gamma_stack, bvadd64(R31, 16bv64), Gamma_V0); assume {:captureState "2020$0"} true; V0, Gamma_V0 := zero_extend96_32(memory_load32_le(stack, bvadd64(R31, 
20bv64))), gamma_load32(Gamma_stack, bvadd64(R31, 20bv64)); FPDecodeRounding8__7, Gamma_FPDecodeRounding8__7 := zero_extend1_2(FPCR[24:22]), Gamma_FPCR; - Exp10__6$0$54, Gamma_Exp10__6$0$54 := FPConvert$64$32(V0[32:0], FPCR), (Gamma_FPCR && Gamma_V0); + Exp10__6$0$54, Gamma_Exp10__6$0$54 := FPConvert$64$32(V0[32:0], FPCR), true; V0, Gamma_V0 := zero_extend64_64(Exp10__6$0$54), Gamma_Exp10__6$0$54; stack, Gamma_stack := memory_store64_le(stack, bvadd64(R31, 32bv64), V0[64:0]), gamma_store64(Gamma_stack, bvadd64(R31, 32bv64), Gamma_V0); assume {:captureState "2032$0"} true; V0, Gamma_V0 := zero_extend64_64(memory_load64_le(stack, bvadd64(R31, 40bv64))), gamma_load64(Gamma_stack, bvadd64(R31, 40bv64)); - Exp7__5$0$57, Gamma_Exp7__5$0$57 := FPToFixed$32$64(V0[64:0], 0, false, FPCR, 3), (Gamma_FPCR && Gamma_V0); + Exp7__5$0$57, Gamma_Exp7__5$0$57 := FPToFixed$32$64(V0[64:0], 0, false, FPCR, 3), true; R0, Gamma_R0 := zero_extend32_32(Exp7__5$0$57), Gamma_Exp7__5$0$57; stack, Gamma_stack := memory_store32_le(stack, bvadd64(R31, 28bv64), R0[32:0]), gamma_store32(Gamma_stack, bvadd64(R31, 28bv64), Gamma_R0); assume {:captureState "2044$0"} true; diff --git a/src/test/correct/function/clang/function.expected b/src/test/correct/function/clang/function.expected index c1e865cdf..30f7cb520 100644 --- a/src/test/correct/function/clang/function.expected +++ b/src/test/correct/function/clang/function.expected @@ -18,7 +18,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -39,12 +39,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { @@ -57,12 +57,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1884bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1885bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1886bv64) == 
2bv8); - free ensures (memory_load8_le(mem, 1887bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1884bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -70,8 +67,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -86,19 +83,13 @@ procedure {:extern} guarantee_reflexive(); procedure get_two(); modifies Gamma_R0, R0; - free requires (memory_load8_le(mem, 1884bv64) == 1bv8); - free requires (memory_load8_le(mem, 1885bv64) == 0bv8); - free requires (memory_load8_le(mem, 1886bv64) == 2bv8); - free requires (memory_load8_le(mem, 1887bv64) == 0bv8); + free requires (memory_load32_le(mem, 1884bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (Gamma_R0 == true); - free ensures (memory_load8_le(mem, 1884bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1885bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1886bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1887bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1884bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -121,10 +112,7 @@ procedure main(); requires (gamma_load32(Gamma_mem, $y_addr) == true); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1884bv64) == 1bv8); - free requires (memory_load8_le(mem, 1885bv64) == 0bv8); - free requires (memory_load8_le(mem, 1886bv64) == 2bv8); - free requires (memory_load8_le(mem, 1887bv64) == 0bv8); + free requires (memory_load32_le(mem, 1884bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -133,10 +121,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1884bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1885bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1886bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1887bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1884bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/function/clang/function_gtirb.expected b/src/test/correct/function/clang/function_gtirb.expected index a72ab33b0..66b9258bb 100644 --- a/src/test/correct/function/clang/function_gtirb.expected +++ b/src/test/correct/function/clang/function_gtirb.expected @@ -18,7 +18,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns 
(bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -39,12 +39,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { @@ -57,12 +57,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1884bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1885bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1886bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1887bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1884bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -70,8 +67,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -90,10 +87,7 @@ procedure main(); requires (gamma_load32(Gamma_mem, $y_addr) == true); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1884bv64) == 1bv8); - free requires (memory_load8_le(mem, 1885bv64) == 0bv8); - free requires (memory_load8_le(mem, 1886bv64) == 2bv8); - free requires (memory_load8_le(mem, 1887bv64) == 0bv8); + free requires (memory_load32_le(mem, 1884bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -102,10 +96,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1884bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1885bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1886bv64) == 2bv8); - free ensures 
(memory_load8_le(mem, 1887bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1884bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -152,19 +143,13 @@ implementation main() procedure get_two(); modifies Gamma_R0, R0; - free requires (memory_load8_le(mem, 1884bv64) == 1bv8); - free requires (memory_load8_le(mem, 1885bv64) == 0bv8); - free requires (memory_load8_le(mem, 1886bv64) == 2bv8); - free requires (memory_load8_le(mem, 1887bv64) == 0bv8); + free requires (memory_load32_le(mem, 1884bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (Gamma_R0 == true); - free ensures (memory_load8_le(mem, 1884bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1885bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1886bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1887bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1884bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/function/clang_pic/function.expected b/src/test/correct/function/clang_pic/function.expected index 9ee11f3e1..648dee795 100644 --- a/src/test/correct/function/clang_pic/function.expected +++ b/src/test/correct/function/clang_pic/function.expected @@ -18,7 +18,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -39,12 +39,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { @@ -57,23 +57,20 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 
procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1956bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1957bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1958bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1959bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1956bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -88,27 +85,21 @@ procedure {:extern} guarantee_reflexive(); procedure get_two(); modifies Gamma_R0, R0; - free requires (memory_load8_le(mem, 1956bv64) == 1bv8); - free requires (memory_load8_le(mem, 1957bv64) == 0bv8); - free requires (memory_load8_le(mem, 1958bv64) == 2bv8); - free requires (memory_load8_le(mem, 1959bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); + free requires (memory_load32_le(mem, 1956bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (Gamma_R0 == true); - free ensures (memory_load8_le(mem, 1956bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1957bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1958bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1959bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); + free ensures (memory_load32_le(mem, 1956bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation get_two() { @@ -127,30 +118,24 @@ procedure main(); requires (gamma_load32(Gamma_mem, $y_addr) == true); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1956bv64) == 1bv8); - free requires (memory_load8_le(mem, 1957bv64) == 0bv8); - free requires (memory_load8_le(mem, 1958bv64) == 2bv8); - free requires (memory_load8_le(mem, 1959bv64) == 0bv8); - 
free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); + free requires (memory_load32_le(mem, 1956bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R29 == old(Gamma_R29)); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1956bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1957bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1958bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1959bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); + free ensures (memory_load32_le(mem, 1956bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/function/clang_pic/function_gtirb.expected b/src/test/correct/function/clang_pic/function_gtirb.expected index e594fca30..044491d73 100644 --- a/src/test/correct/function/clang_pic/function_gtirb.expected +++ b/src/test/correct/function/clang_pic/function_gtirb.expected @@ -18,7 +18,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -39,12 +39,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 
2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { @@ -57,23 +57,20 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1956bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1957bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1958bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1959bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1956bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -92,30 +89,24 @@ procedure main(); requires (gamma_load32(Gamma_mem, $y_addr) == true); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1956bv64) == 1bv8); - free requires (memory_load8_le(mem, 1957bv64) == 0bv8); - free requires (memory_load8_le(mem, 1958bv64) == 2bv8); - free requires (memory_load8_le(mem, 1959bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); + free requires (memory_load32_le(mem, 1956bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R29 == old(Gamma_R29)); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1956bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1957bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1958bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1959bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); + free ensures (memory_load32_le(mem, 1956bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures 
(memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { @@ -162,27 +153,21 @@ implementation main() procedure get_two(); modifies Gamma_R0, R0; - free requires (memory_load8_le(mem, 1956bv64) == 1bv8); - free requires (memory_load8_le(mem, 1957bv64) == 0bv8); - free requires (memory_load8_le(mem, 1958bv64) == 2bv8); - free requires (memory_load8_le(mem, 1959bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); + free requires (memory_load32_le(mem, 1956bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (Gamma_R0 == true); - free ensures (memory_load8_le(mem, 1956bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1957bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1958bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1959bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); + free ensures (memory_load32_le(mem, 1956bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation get_two() { diff --git a/src/test/correct/function/gcc/function.expected b/src/test/correct/function/gcc/function.expected index beb93a1bf..933d00e90 100644 --- a/src/test/correct/function/gcc/function.expected +++ b/src/test/correct/function/gcc/function.expected @@ -16,7 +16,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -37,12 +37,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, 
index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { @@ -56,12 +56,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -69,8 +66,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -85,19 +82,13 @@ procedure {:extern} guarantee_reflexive(); procedure get_two(); modifies Gamma_R0, R0; - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (Gamma_R0 == true); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -120,10 +111,7 @@ procedure main(); requires (gamma_load32(Gamma_mem, $y_addr) == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -132,10 +120,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures 
(memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/function/gcc/function_gtirb.expected b/src/test/correct/function/gcc/function_gtirb.expected index ee39081b8..e1acd3896 100644 --- a/src/test/correct/function/gcc/function_gtirb.expected +++ b/src/test/correct/function/gcc/function_gtirb.expected @@ -16,7 +16,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -37,12 +37,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { @@ -56,12 +56,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -69,8 +66,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -85,19 +82,13 @@ procedure {:extern} 
guarantee_reflexive(); procedure get_two(); modifies Gamma_R0, R0; - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (Gamma_R0 == true); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -120,10 +111,7 @@ procedure main(); requires (gamma_load32(Gamma_mem, $y_addr) == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -132,10 +120,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/function/gcc_pic/function.expected b/src/test/correct/function/gcc_pic/function.expected index 46be66a06..cc6c7f4f6 100644 --- a/src/test/correct/function/gcc_pic/function.expected +++ b/src/test/correct/function/gcc_pic/function.expected @@ -16,7 +16,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -37,12 +37,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ 
(memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { @@ -56,23 +56,20 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1960bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1961bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1962bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1963bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1960bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -87,27 +84,21 @@ procedure {:extern} guarantee_reflexive(); procedure get_two(); modifies Gamma_R0, R0; - free requires (memory_load8_le(mem, 1960bv64) == 1bv8); - free requires (memory_load8_le(mem, 1961bv64) == 0bv8); - free requires (memory_load8_le(mem, 1962bv64) == 2bv8); - free requires (memory_load8_le(mem, 1963bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1960bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (Gamma_R0 == true); - free ensures (memory_load8_le(mem, 1960bv64) == 1bv8); - free ensures (memory_load8_le(mem, 
1961bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1962bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1963bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1960bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation get_two() { @@ -126,30 +117,24 @@ procedure main(); requires (gamma_load32(Gamma_mem, $y_addr) == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1960bv64) == 1bv8); - free requires (memory_load8_le(mem, 1961bv64) == 0bv8); - free requires (memory_load8_le(mem, 1962bv64) == 2bv8); - free requires (memory_load8_le(mem, 1963bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1960bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R29 == old(Gamma_R29)); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1960bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1961bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1962bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1963bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1960bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/function/gcc_pic/function_gtirb.expected b/src/test/correct/function/gcc_pic/function_gtirb.expected index 9c9a51a5d..70b62c99d 100644 --- a/src/test/correct/function/gcc_pic/function_gtirb.expected +++ b/src/test/correct/function/gcc_pic/function_gtirb.expected @@ -16,7 +16,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function 
{:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -37,12 +37,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { @@ -56,23 +56,20 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1960bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1961bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1962bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1963bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1960bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -91,30 +88,24 @@ procedure main(); requires (gamma_load32(Gamma_mem, $y_addr) == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1960bv64) == 1bv8); - free requires (memory_load8_le(mem, 1961bv64) == 0bv8); - free requires (memory_load8_le(mem, 1962bv64) == 2bv8); - free requires (memory_load8_le(mem, 1963bv64) == 0bv8); - free requires (memory_load64_le(mem, 
69608bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1960bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R29 == old(Gamma_R29)); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1960bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1961bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1962bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1963bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1960bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { @@ -162,27 +153,21 @@ implementation main() procedure get_two(); modifies Gamma_R0, R0; - free requires (memory_load8_le(mem, 1960bv64) == 1bv8); - free requires (memory_load8_le(mem, 1961bv64) == 0bv8); - free requires (memory_load8_le(mem, 1962bv64) == 2bv8); - free requires (memory_load8_le(mem, 1963bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1960bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (Gamma_R0 == true); - free ensures (memory_load8_le(mem, 1960bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1961bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1962bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1963bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1960bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + 
free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation get_two() { diff --git a/src/test/correct/function1/clang/function1.expected b/src/test/correct/function1/clang/function1.expected index c99be706b..458e75574 100644 --- a/src/test/correct/function1/clang/function1.expected +++ b/src/test/correct/function1/clang/function1.expected @@ -28,7 +28,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69692bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69696bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -87,8 +87,8 @@ function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2024bv64) == 2924859843805185bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); @@ -97,8 +97,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/function1/clang/function1_gtirb.expected b/src/test/correct/function1/clang/function1_gtirb.expected index 6a09e1ad8..c05abc07c 100644 --- a/src/test/correct/function1/clang/function1_gtirb.expected +++ b/src/test/correct/function1/clang/function1_gtirb.expected @@ -28,7 +28,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69692bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69696bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -87,8 +87,8 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2024bv64) == 2924859843805185bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); @@ -97,8 +97,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/function1/clang_O2/function1.expected b/src/test/correct/function1/clang_O2/function1.expected index 721d20a89..07be739dc 100644 --- a/src/test/correct/function1/clang_O2/function1.expected +++ b/src/test/correct/function1/clang_O2/function1.expected @@ -30,7 +30,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69692bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69696bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -62,8 +62,8 @@ function {:extern} memory_store64_le(memory: 
[bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 1976bv64) == 2924859843805185bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); @@ -72,8 +72,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/function1/clang_O2/function1_gtirb.expected b/src/test/correct/function1/clang_O2/function1_gtirb.expected index a6814f2c8..15b52ffe0 100644 --- a/src/test/correct/function1/clang_O2/function1_gtirb.expected +++ b/src/test/correct/function1/clang_O2/function1_gtirb.expected @@ -30,7 +30,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69692bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69696bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -62,8 +62,8 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 1976bv64) == 2924859843805185bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); @@ -72,8 +72,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/function1/gcc/function1.expected b/src/test/correct/function1/gcc/function1.expected index f1e751c67..7679162c5 100644 --- a/src/test/correct/function1/gcc/function1.expected +++ b/src/test/correct/function1/gcc/function1.expected @@ -24,7 +24,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -82,8 +82,8 @@ function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2048bv64) == 131073bv64); free ensures (memory_load8_le(mem, 2056bv64) == 37bv8); free ensures (memory_load8_le(mem, 2057bv64) == 100bv8); @@ -96,8 +96,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/function1/gcc/function1_gtirb.expected b/src/test/correct/function1/gcc/function1_gtirb.expected index 
d659b4364..7c618985e 100644 --- a/src/test/correct/function1/gcc/function1_gtirb.expected +++ b/src/test/correct/function1/gcc/function1_gtirb.expected @@ -24,7 +24,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -82,8 +82,8 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2048bv64) == 131073bv64); free ensures (memory_load8_le(mem, 2056bv64) == 37bv8); free ensures (memory_load8_le(mem, 2057bv64) == 100bv8); @@ -96,8 +96,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/function1/gcc_O2/function1.expected b/src/test/correct/function1/gcc_O2/function1.expected index ec235edf3..d2d1638ca 100644 --- a/src/test/correct/function1/gcc_O2/function1.expected +++ b/src/test/correct/function1/gcc_O2/function1.expected @@ -26,7 +26,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -62,8 +62,8 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2048bv64) == 131073bv64); free ensures (memory_load8_le(mem, 2056bv64) == 37bv8); free ensures (memory_load8_le(mem, 2057bv64) == 100bv8); @@ -76,8 +76,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/function1/gcc_O2/function1_gtirb.expected b/src/test/correct/function1/gcc_O2/function1_gtirb.expected index c6c0b7514..65932f0f8 100644 --- a/src/test/correct/function1/gcc_O2/function1_gtirb.expected +++ b/src/test/correct/function1/gcc_O2/function1_gtirb.expected @@ -26,7 +26,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -62,8 +62,8 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2048bv64) == 131073bv64); free ensures (memory_load8_le(mem, 
2056bv64) == 37bv8); free ensures (memory_load8_le(mem, 2057bv64) == 100bv8); @@ -76,8 +76,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/functionpointer/clang/functionpointer_gtirb.expected b/src/test/correct/functionpointer/clang/functionpointer_gtirb.expected index 47885be86..291e95a84 100644 --- a/src/test/correct/functionpointer/clang/functionpointer_gtirb.expected +++ b/src/test/correct/functionpointer/clang/functionpointer_gtirb.expected @@ -22,7 +22,7 @@ var {:extern} VF: bv1; var {:extern} ZF: bv1; var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -56,10 +56,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -73,12 +69,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2052bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2053bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2054bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2055bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2052bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1860bv64); @@ -86,8 +79,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -102,18 +95,12 @@ procedure {:extern} guarantee_reflexive(); procedure set_seven(); modifies Gamma_R8, Gamma_R9, Gamma_mem, R8, R9, mem; - free requires (memory_load8_le(mem, 2052bv64) == 1bv8); - free requires (memory_load8_le(mem, 2053bv64) == 0bv8); - free requires (memory_load8_le(mem, 2054bv64) == 2bv8); - free requires (memory_load8_le(mem, 2055bv64) == 0bv8); + free requires (memory_load32_le(mem, 2052bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1860bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 2052bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2053bv64) == 0bv8); - free ensures 
(memory_load8_le(mem, 2054bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2055bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2052bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1860bv64); @@ -137,18 +124,12 @@ implementation set_seven() procedure set_six(); modifies Gamma_R8, Gamma_R9, Gamma_mem, R8, R9, mem; - free requires (memory_load8_le(mem, 2052bv64) == 1bv8); - free requires (memory_load8_le(mem, 2053bv64) == 0bv8); - free requires (memory_load8_le(mem, 2054bv64) == 2bv8); - free requires (memory_load8_le(mem, 2055bv64) == 0bv8); + free requires (memory_load32_le(mem, 2052bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1860bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 2052bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2053bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2054bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2055bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2052bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1860bv64); @@ -172,18 +153,12 @@ implementation set_six() procedure set_two(); modifies Gamma_R8, Gamma_R9, Gamma_mem, R8, R9, mem; - free requires (memory_load8_le(mem, 2052bv64) == 1bv8); - free requires (memory_load8_le(mem, 2053bv64) == 0bv8); - free requires (memory_load8_le(mem, 2054bv64) == 2bv8); - free requires (memory_load8_le(mem, 2055bv64) == 0bv8); + free requires (memory_load32_le(mem, 2052bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1860bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 2052bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2053bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2054bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2055bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2052bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1860bv64); @@ -210,10 +185,7 @@ procedure main(); requires (Gamma_R0 == true); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 2052bv64) == 1bv8); - free requires (memory_load8_le(mem, 2053bv64) == 0bv8); - free requires (memory_load8_le(mem, 2054bv64) == 2bv8); - free requires (memory_load8_le(mem, 2055bv64) == 0bv8); + free requires (memory_load32_le(mem, 2052bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1860bv64); @@ -222,10 +194,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 2052bv64) == 1bv8); - free ensures (memory_load8_le(mem, 
2053bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2054bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2055bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2052bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1860bv64); diff --git a/src/test/correct/functionpointer/clang_pic/functionpointer_gtirb.expected b/src/test/correct/functionpointer/clang_pic/functionpointer_gtirb.expected index 6ab5da5b5..520e1d13b 100644 --- a/src/test/correct/functionpointer/clang_pic/functionpointer_gtirb.expected +++ b/src/test/correct/functionpointer/clang_pic/functionpointer_gtirb.expected @@ -22,7 +22,7 @@ var {:extern} VF: bv1; var {:extern} ZF: bv1; var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -56,10 +56,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -73,25 +69,22 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2192bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2193bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2194bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2195bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 1960bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 2000bv64); - free ensures (memory_load64_le(mem, 69552bv64) == 69684bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2192bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69032bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69040bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69552bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69568bv64) == 1940bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 1960bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1980bv64); - free ensures (memory_load64_le(mem, 69032bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -106,30 +99,24 @@ procedure {:extern} guarantee_reflexive(); procedure set_seven(); modifies Gamma_R8, Gamma_R9, Gamma_mem, R8, R9, mem; - free requires 
(memory_load8_le(mem, 2192bv64) == 1bv8); - free requires (memory_load8_le(mem, 2193bv64) == 0bv8); - free requires (memory_load8_le(mem, 2194bv64) == 2bv8); - free requires (memory_load8_le(mem, 2195bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 1960bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 2000bv64); - free requires (memory_load64_le(mem, 69552bv64) == 69684bv64); + free requires (memory_load32_le(mem, 2192bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69032bv64) == 1936bv64); free requires (memory_load64_le(mem, 69040bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69552bv64) == 69684bv64); free requires (memory_load64_le(mem, 69568bv64) == 1940bv64); + free requires (memory_load64_le(mem, 69576bv64) == 1960bv64); + free requires (memory_load64_le(mem, 69584bv64) == 2000bv64); free requires (memory_load64_le(mem, 69592bv64) == 1980bv64); - free requires (memory_load64_le(mem, 69032bv64) == 1936bv64); - free ensures (memory_load8_le(mem, 2192bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2193bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2194bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2195bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 1960bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 2000bv64); - free ensures (memory_load64_le(mem, 69552bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 2192bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69032bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69040bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69552bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69568bv64) == 1940bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 1960bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1980bv64); - free ensures (memory_load64_le(mem, 69032bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation set_seven() { @@ -151,30 +138,24 @@ implementation set_seven() procedure set_six(); modifies Gamma_R8, Gamma_R9, Gamma_mem, R8, R9, mem; - free requires (memory_load8_le(mem, 2192bv64) == 1bv8); - free requires (memory_load8_le(mem, 2193bv64) == 0bv8); - free requires (memory_load8_le(mem, 2194bv64) == 2bv8); - free requires (memory_load8_le(mem, 2195bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 1960bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 2000bv64); - free requires (memory_load64_le(mem, 69552bv64) == 69684bv64); + free requires (memory_load32_le(mem, 2192bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69032bv64) == 1936bv64); free requires (memory_load64_le(mem, 69040bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69552bv64) == 69684bv64); free requires (memory_load64_le(mem, 69568bv64) == 1940bv64); + free requires (memory_load64_le(mem, 69576bv64) == 1960bv64); + free requires (memory_load64_le(mem, 69584bv64) == 2000bv64); free requires (memory_load64_le(mem, 69592bv64) == 1980bv64); - free requires (memory_load64_le(mem, 69032bv64) == 1936bv64); - free ensures (memory_load8_le(mem, 2192bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2193bv64) == 0bv8); - free 
ensures (memory_load8_le(mem, 2194bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2195bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 1960bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 2000bv64); - free ensures (memory_load64_le(mem, 69552bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 2192bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69032bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69040bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69552bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69568bv64) == 1940bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 1960bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1980bv64); - free ensures (memory_load64_le(mem, 69032bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation set_six() { @@ -199,34 +180,28 @@ procedure main(); requires (Gamma_R0 == true); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 2192bv64) == 1bv8); - free requires (memory_load8_le(mem, 2193bv64) == 0bv8); - free requires (memory_load8_le(mem, 2194bv64) == 2bv8); - free requires (memory_load8_le(mem, 2195bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 1960bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 2000bv64); - free requires (memory_load64_le(mem, 69552bv64) == 69684bv64); + free requires (memory_load32_le(mem, 2192bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69032bv64) == 1936bv64); free requires (memory_load64_le(mem, 69040bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69552bv64) == 69684bv64); free requires (memory_load64_le(mem, 69568bv64) == 1940bv64); + free requires (memory_load64_le(mem, 69576bv64) == 1960bv64); + free requires (memory_load64_le(mem, 69584bv64) == 2000bv64); free requires (memory_load64_le(mem, 69592bv64) == 1980bv64); - free requires (memory_load64_le(mem, 69032bv64) == 1936bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R29 == old(Gamma_R29)); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 2192bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2193bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2194bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2195bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 1960bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 2000bv64); - free ensures (memory_load64_le(mem, 69552bv64) == 69684bv64); + free ensures (memory_load32_le(mem, 2192bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69032bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69040bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69552bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69568bv64) == 1940bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 1960bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1980bv64); - free ensures 
(memory_load64_le(mem, 69032bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { @@ -422,30 +397,24 @@ implementation main() procedure set_two(); modifies Gamma_R8, Gamma_R9, Gamma_mem, R8, R9, mem; - free requires (memory_load8_le(mem, 2192bv64) == 1bv8); - free requires (memory_load8_le(mem, 2193bv64) == 0bv8); - free requires (memory_load8_le(mem, 2194bv64) == 2bv8); - free requires (memory_load8_le(mem, 2195bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 1960bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 2000bv64); - free requires (memory_load64_le(mem, 69552bv64) == 69684bv64); + free requires (memory_load32_le(mem, 2192bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69032bv64) == 1936bv64); free requires (memory_load64_le(mem, 69040bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69552bv64) == 69684bv64); free requires (memory_load64_le(mem, 69568bv64) == 1940bv64); + free requires (memory_load64_le(mem, 69576bv64) == 1960bv64); + free requires (memory_load64_le(mem, 69584bv64) == 2000bv64); free requires (memory_load64_le(mem, 69592bv64) == 1980bv64); - free requires (memory_load64_le(mem, 69032bv64) == 1936bv64); - free ensures (memory_load8_le(mem, 2192bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2193bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2194bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2195bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 1960bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 2000bv64); - free ensures (memory_load64_le(mem, 69552bv64) == 69684bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 2192bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69032bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69040bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69552bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69568bv64) == 1940bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 1960bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 2000bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1980bv64); - free ensures (memory_load64_le(mem, 69032bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation set_two() { diff --git a/src/test/correct/functionpointer/gcc/functionpointer_gtirb.expected b/src/test/correct/functionpointer/gcc/functionpointer_gtirb.expected index 1533649c5..42c6797e6 100644 --- a/src/test/correct/functionpointer/gcc/functionpointer_gtirb.expected +++ b/src/test/correct/functionpointer/gcc/functionpointer_gtirb.expected @@ -20,7 +20,7 @@ var {:extern} VF: bv1; var {:extern} ZF: bv1; var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -54,10 +54,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) 
returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -71,12 +67,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2052bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2053bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2054bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2055bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2052bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1884bv64); @@ -84,8 +77,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -100,18 +93,12 @@ procedure {:extern} guarantee_reflexive(); procedure set_six(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; - free requires (memory_load8_le(mem, 2052bv64) == 1bv8); - free requires (memory_load8_le(mem, 2053bv64) == 0bv8); - free requires (memory_load8_le(mem, 2054bv64) == 2bv8); - free requires (memory_load8_le(mem, 2055bv64) == 0bv8); + free requires (memory_load32_le(mem, 2052bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1884bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 2052bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2053bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2054bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2055bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2052bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1884bv64); @@ -139,10 +126,7 @@ procedure main(); requires (Gamma_R0 == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 2052bv64) == 1bv8); - free requires (memory_load8_le(mem, 2053bv64) == 0bv8); - free requires (memory_load8_le(mem, 2054bv64) == 2bv8); - free requires (memory_load8_le(mem, 2055bv64) == 0bv8); + free requires (memory_load32_le(mem, 2052bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1884bv64); @@ -151,10 +135,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 2052bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2053bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2054bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2055bv64) == 
0bv8); + free ensures (memory_load32_le(mem, 2052bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1884bv64); @@ -318,18 +299,12 @@ implementation main() procedure set_seven(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; - free requires (memory_load8_le(mem, 2052bv64) == 1bv8); - free requires (memory_load8_le(mem, 2053bv64) == 0bv8); - free requires (memory_load8_le(mem, 2054bv64) == 2bv8); - free requires (memory_load8_le(mem, 2055bv64) == 0bv8); + free requires (memory_load32_le(mem, 2052bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1884bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 2052bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2053bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2054bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2055bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2052bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1884bv64); @@ -354,18 +329,12 @@ implementation set_seven() procedure set_two(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; - free requires (memory_load8_le(mem, 2052bv64) == 1bv8); - free requires (memory_load8_le(mem, 2053bv64) == 0bv8); - free requires (memory_load8_le(mem, 2054bv64) == 2bv8); - free requires (memory_load8_le(mem, 2055bv64) == 0bv8); + free requires (memory_load32_le(mem, 2052bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1884bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 2052bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2053bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2054bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2055bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2052bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1884bv64); diff --git a/src/test/correct/functionpointer/gcc_O2/functionpointer_gtirb.expected b/src/test/correct/functionpointer/gcc_O2/functionpointer_gtirb.expected index 617efea08..5efba0883 100644 --- a/src/test/correct/functionpointer/gcc_O2/functionpointer_gtirb.expected +++ b/src/test/correct/functionpointer/gcc_O2/functionpointer_gtirb.expected @@ -20,7 +20,7 @@ var {:extern} VF: bv1; var {:extern} ZF: bv1; var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -42,12 +42,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} 
memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { @@ -62,12 +62,9 @@ function {:extern} {:bvbuiltin "sign_extend 1"} sign_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (bv33); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2020bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2021bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2022bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2023bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2020bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -75,8 +72,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -91,18 +88,12 @@ procedure {:extern} guarantee_reflexive(); procedure set_six(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; - free requires (memory_load8_le(mem, 2020bv64) == 1bv8); - free requires (memory_load8_le(mem, 2021bv64) == 0bv8); - free requires (memory_load8_le(mem, 2022bv64) == 2bv8); - free requires (memory_load8_le(mem, 2023bv64) == 0bv8); + free requires (memory_load32_le(mem, 2020bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1936bv64); free requires (memory_load64_le(mem, 69024bv64) == 1856bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 2020bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2021bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2022bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2023bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2020bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -129,10 +120,7 @@ procedure main(); requires (Gamma_R0 == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 2020bv64) == 
1bv8); - free requires (memory_load8_le(mem, 2021bv64) == 0bv8); - free requires (memory_load8_le(mem, 2022bv64) == 2bv8); - free requires (memory_load8_le(mem, 2023bv64) == 0bv8); + free requires (memory_load32_le(mem, 2020bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1936bv64); free requires (memory_load64_le(mem, 69024bv64) == 1856bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -141,10 +129,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 2020bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2021bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2022bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2023bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2020bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/functionpointer/gcc_pic/functionpointer_gtirb.expected b/src/test/correct/functionpointer/gcc_pic/functionpointer_gtirb.expected index aa1b8397d..3e7676341 100644 --- a/src/test/correct/functionpointer/gcc_pic/functionpointer_gtirb.expected +++ b/src/test/correct/functionpointer/gcc_pic/functionpointer_gtirb.expected @@ -20,7 +20,7 @@ var {:extern} VF: bv1; var {:extern} ZF: bv1; var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -54,10 +54,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -71,25 +67,22 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2180bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2181bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2182bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2183bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 2012bv64); - free ensures (memory_load64_le(mem, 69576bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1988bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 1964bv64); - free ensures (memory_load64_le(mem, 68992bv64) == 1856bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2180bv64) == 131073bv32); free ensures (memory_load64_le(mem, 68984bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 68992bv64) == 1856bv64); + free ensures 
(memory_load64_le(mem, 69576bv64) == 69652bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1940bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 1964bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 2012bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1988bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -104,30 +97,24 @@ procedure {:extern} guarantee_reflexive(); procedure set_seven(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; - free requires (memory_load8_le(mem, 2180bv64) == 1bv8); - free requires (memory_load8_le(mem, 2181bv64) == 0bv8); - free requires (memory_load8_le(mem, 2182bv64) == 2bv8); - free requires (memory_load8_le(mem, 2183bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 2012bv64); - free requires (memory_load64_le(mem, 69576bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1988bv64); - free requires (memory_load64_le(mem, 69600bv64) == 1964bv64); - free requires (memory_load64_le(mem, 68992bv64) == 1856bv64); + free requires (memory_load32_le(mem, 2180bv64) == 131073bv32); free requires (memory_load64_le(mem, 68984bv64) == 1936bv64); + free requires (memory_load64_le(mem, 68992bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69652bv64); free requires (memory_load64_le(mem, 69592bv64) == 1940bv64); - free ensures (memory_load8_le(mem, 2180bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2181bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2182bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2183bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 2012bv64); - free ensures (memory_load64_le(mem, 69576bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1988bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 1964bv64); - free ensures (memory_load64_le(mem, 68992bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69600bv64) == 1964bv64); + free requires (memory_load64_le(mem, 69608bv64) == 2012bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1988bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); + free ensures (memory_load32_le(mem, 2180bv64) == 131073bv32); free ensures (memory_load64_le(mem, 68984bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 68992bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69652bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1940bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 1964bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 2012bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1988bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation set_seven() { @@ -152,34 +139,28 @@ procedure main(); requires (Gamma_R0 == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 2180bv64) == 1bv8); - free requires (memory_load8_le(mem, 2181bv64) == 0bv8); - free requires (memory_load8_le(mem, 2182bv64) == 2bv8); - free requires (memory_load8_le(mem, 2183bv64) == 0bv8); - free requires 
(memory_load64_le(mem, 69608bv64) == 2012bv64); - free requires (memory_load64_le(mem, 69576bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1988bv64); - free requires (memory_load64_le(mem, 69600bv64) == 1964bv64); - free requires (memory_load64_le(mem, 68992bv64) == 1856bv64); + free requires (memory_load32_le(mem, 2180bv64) == 131073bv32); free requires (memory_load64_le(mem, 68984bv64) == 1936bv64); + free requires (memory_load64_le(mem, 68992bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69652bv64); free requires (memory_load64_le(mem, 69592bv64) == 1940bv64); + free requires (memory_load64_le(mem, 69600bv64) == 1964bv64); + free requires (memory_load64_le(mem, 69608bv64) == 2012bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1988bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R29 == old(Gamma_R29)); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 2180bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2181bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2182bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2183bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 2012bv64); - free ensures (memory_load64_le(mem, 69576bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1988bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 1964bv64); - free ensures (memory_load64_le(mem, 68992bv64) == 1856bv64); + free ensures (memory_load32_le(mem, 2180bv64) == 131073bv32); free ensures (memory_load64_le(mem, 68984bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 68992bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69652bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1940bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 1964bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 2012bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1988bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { @@ -343,30 +324,24 @@ implementation main() procedure set_two(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; - free requires (memory_load8_le(mem, 2180bv64) == 1bv8); - free requires (memory_load8_le(mem, 2181bv64) == 0bv8); - free requires (memory_load8_le(mem, 2182bv64) == 2bv8); - free requires (memory_load8_le(mem, 2183bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 2012bv64); - free requires (memory_load64_le(mem, 69576bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1988bv64); - free requires (memory_load64_le(mem, 69600bv64) == 1964bv64); - free requires (memory_load64_le(mem, 68992bv64) == 1856bv64); + free requires (memory_load32_le(mem, 2180bv64) == 131073bv32); free requires (memory_load64_le(mem, 68984bv64) == 1936bv64); + free requires (memory_load64_le(mem, 68992bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69652bv64); free requires (memory_load64_le(mem, 69592bv64) == 1940bv64); - free ensures (memory_load8_le(mem, 2180bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2181bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2182bv64) == 2bv8); - free ensures 
(memory_load8_le(mem, 2183bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 2012bv64); - free ensures (memory_load64_le(mem, 69576bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1988bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 1964bv64); - free ensures (memory_load64_le(mem, 68992bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69600bv64) == 1964bv64); + free requires (memory_load64_le(mem, 69608bv64) == 2012bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1988bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); + free ensures (memory_load32_le(mem, 2180bv64) == 131073bv32); free ensures (memory_load64_le(mem, 68984bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 68992bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69652bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1940bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 1964bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 2012bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1988bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation set_two() { @@ -388,30 +363,24 @@ implementation set_two() procedure set_six(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; - free requires (memory_load8_le(mem, 2180bv64) == 1bv8); - free requires (memory_load8_le(mem, 2181bv64) == 0bv8); - free requires (memory_load8_le(mem, 2182bv64) == 2bv8); - free requires (memory_load8_le(mem, 2183bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 2012bv64); - free requires (memory_load64_le(mem, 69576bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1988bv64); - free requires (memory_load64_le(mem, 69600bv64) == 1964bv64); - free requires (memory_load64_le(mem, 68992bv64) == 1856bv64); + free requires (memory_load32_le(mem, 2180bv64) == 131073bv32); free requires (memory_load64_le(mem, 68984bv64) == 1936bv64); + free requires (memory_load64_le(mem, 68992bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69652bv64); free requires (memory_load64_le(mem, 69592bv64) == 1940bv64); - free ensures (memory_load8_le(mem, 2180bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2181bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2182bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2183bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 2012bv64); - free ensures (memory_load64_le(mem, 69576bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1988bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 1964bv64); - free ensures (memory_load64_le(mem, 68992bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69600bv64) == 1964bv64); + free requires (memory_load64_le(mem, 69608bv64) == 2012bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1988bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); + free ensures (memory_load32_le(mem, 2180bv64) == 131073bv32); free ensures (memory_load64_le(mem, 68984bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 68992bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69652bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1940bv64); + free ensures (memory_load64_le(mem, 
69600bv64) == 1964bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 2012bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1988bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation set_six() { diff --git a/src/test/correct/functions_with_params/clang/functions_with_params.expected b/src/test/correct/functions_with_params/clang/functions_with_params.expected index d2b236b8c..26a4ae5d6 100644 --- a/src/test/correct/functions_with_params/clang/functions_with_params.expected +++ b/src/test/correct/functions_with_params/clang/functions_with_params.expected @@ -40,10 +40,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -55,12 +51,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1912bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1913bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1914bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1915bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1912bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -68,8 +61,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -86,10 +79,7 @@ procedure main(); modifies Gamma_R0, Gamma_R29, Gamma_R30, Gamma_R31, Gamma_R8, Gamma_stack, R0, R29, R30, R31, R8, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1912bv64) == 1bv8); - free requires (memory_load8_le(mem, 1913bv64) == 0bv8); - free requires (memory_load8_le(mem, 1914bv64) == 2bv8); - free requires (memory_load8_le(mem, 1915bv64) == 0bv8); + free requires (memory_load32_le(mem, 1912bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -98,10 +88,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1912bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1913bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1914bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1915bv64) == 0bv8); + free ensures (memory_load32_le(mem, 
1912bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -149,20 +136,14 @@ implementation main() procedure plus_one(); modifies Gamma_R0, Gamma_R31, Gamma_R8, Gamma_stack, R0, R31, R8, stack; - free requires (memory_load8_le(mem, 1912bv64) == 1bv8); - free requires (memory_load8_le(mem, 1913bv64) == 0bv8); - free requires (memory_load8_le(mem, 1914bv64) == 2bv8); - free requires (memory_load8_le(mem, 1915bv64) == 0bv8); + free requires (memory_load32_le(mem, 1912bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1912bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1913bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1914bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1915bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1912bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/functions_with_params/clang/functions_with_params_gtirb.expected b/src/test/correct/functions_with_params/clang/functions_with_params_gtirb.expected index 428eb1a3e..e73fc78e3 100644 --- a/src/test/correct/functions_with_params/clang/functions_with_params_gtirb.expected +++ b/src/test/correct/functions_with_params/clang/functions_with_params_gtirb.expected @@ -40,10 +40,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -55,12 +51,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1912bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1913bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1914bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1915bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1912bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -68,8 +61,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures 
(mem == old(mem)); implementation {:extern} rely_transitive() { @@ -86,10 +79,7 @@ procedure main(); modifies Gamma_R0, Gamma_R29, Gamma_R30, Gamma_R31, Gamma_R8, Gamma_stack, R0, R29, R30, R31, R8, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1912bv64) == 1bv8); - free requires (memory_load8_le(mem, 1913bv64) == 0bv8); - free requires (memory_load8_le(mem, 1914bv64) == 2bv8); - free requires (memory_load8_le(mem, 1915bv64) == 0bv8); + free requires (memory_load32_le(mem, 1912bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -98,10 +88,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1912bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1913bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1914bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1915bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1912bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -149,20 +136,14 @@ implementation main() procedure plus_one(); modifies Gamma_R0, Gamma_R31, Gamma_R8, Gamma_stack, R0, R31, R8, stack; - free requires (memory_load8_le(mem, 1912bv64) == 1bv8); - free requires (memory_load8_le(mem, 1913bv64) == 0bv8); - free requires (memory_load8_le(mem, 1914bv64) == 2bv8); - free requires (memory_load8_le(mem, 1915bv64) == 0bv8); + free requires (memory_load32_le(mem, 1912bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1912bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1913bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1914bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1915bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1912bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/functions_with_params/gcc/functions_with_params.expected b/src/test/correct/functions_with_params/gcc/functions_with_params.expected index 2adfff70a..c1563536b 100644 --- a/src/test/correct/functions_with_params/gcc/functions_with_params.expected +++ b/src/test/correct/functions_with_params/gcc/functions_with_params.expected @@ -38,10 +38,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: 
bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -53,12 +49,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1904bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1905bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1906bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1907bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1904bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -66,8 +59,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -84,10 +77,7 @@ procedure main(); modifies Gamma_R0, Gamma_R29, Gamma_R30, Gamma_R31, Gamma_stack, R0, R29, R30, R31, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1904bv64) == 1bv8); - free requires (memory_load8_le(mem, 1905bv64) == 0bv8); - free requires (memory_load8_le(mem, 1906bv64) == 2bv8); - free requires (memory_load8_le(mem, 1907bv64) == 0bv8); + free requires (memory_load32_le(mem, 1904bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -96,10 +86,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1904bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1905bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1906bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1907bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1904bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -144,20 +131,14 @@ implementation main() procedure plus_one(); modifies Gamma_R0, Gamma_R31, Gamma_stack, R0, R31, stack; - free requires (memory_load8_le(mem, 1904bv64) == 1bv8); - free requires (memory_load8_le(mem, 1905bv64) == 0bv8); - free requires (memory_load8_le(mem, 1906bv64) == 2bv8); - free requires (memory_load8_le(mem, 1907bv64) == 0bv8); + free requires (memory_load32_le(mem, 1904bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1904bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1905bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1906bv64) == 2bv8); - free ensures 
(memory_load8_le(mem, 1907bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1904bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/functions_with_params/gcc/functions_with_params_gtirb.expected b/src/test/correct/functions_with_params/gcc/functions_with_params_gtirb.expected index aba4779b9..98806b657 100644 --- a/src/test/correct/functions_with_params/gcc/functions_with_params_gtirb.expected +++ b/src/test/correct/functions_with_params/gcc/functions_with_params_gtirb.expected @@ -38,10 +38,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -53,12 +49,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1904bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1905bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1906bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1907bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1904bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -66,8 +59,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -82,20 +75,14 @@ procedure {:extern} guarantee_reflexive(); procedure plus_one(); modifies Gamma_R0, Gamma_R31, Gamma_stack, R0, R31, stack; - free requires (memory_load8_le(mem, 1904bv64) == 1bv8); - free requires (memory_load8_le(mem, 1905bv64) == 0bv8); - free requires (memory_load8_le(mem, 1906bv64) == 2bv8); - free requires (memory_load8_le(mem, 1907bv64) == 0bv8); + free requires (memory_load32_le(mem, 1904bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1904bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1905bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1906bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1907bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1904bv64) == 131073bv32); free ensures (memory_load64_le(mem, 
69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -121,10 +108,7 @@ procedure main(); modifies Gamma_R0, Gamma_R29, Gamma_R30, Gamma_R31, Gamma_stack, R0, R29, R30, R31, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1904bv64) == 1bv8); - free requires (memory_load8_le(mem, 1905bv64) == 0bv8); - free requires (memory_load8_le(mem, 1906bv64) == 2bv8); - free requires (memory_load8_le(mem, 1907bv64) == 0bv8); + free requires (memory_load32_le(mem, 1904bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -133,10 +117,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1904bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1905bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1906bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1907bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1904bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/ifbranches/clang/ifbranches.expected b/src/test/correct/ifbranches/clang/ifbranches.expected index 4c011a0ed..60d8d807e 100644 --- a/src/test/correct/ifbranches/clang/ifbranches.expected +++ b/src/test/correct/ifbranches/clang/ifbranches.expected @@ -45,10 +45,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -62,12 +58,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -75,8 +68,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} 
rely_transitive() { @@ -94,20 +87,14 @@ procedure main(); requires (Gamma_R0 == true); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/ifbranches/clang/ifbranches_gtirb.expected b/src/test/correct/ifbranches/clang/ifbranches_gtirb.expected index 63d86188d..19655971a 100644 --- a/src/test/correct/ifbranches/clang/ifbranches_gtirb.expected +++ b/src/test/correct/ifbranches/clang/ifbranches_gtirb.expected @@ -44,10 +44,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -60,12 +56,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -73,8 +66,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -92,20 +85,14 @@ procedure main(); requires (Gamma_R0 == true); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 
69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/ifbranches/clang_O2/ifbranches.expected b/src/test/correct/ifbranches/clang_O2/ifbranches.expected index edfefe37b..8f7c7b621 100644 --- a/src/test/correct/ifbranches/clang_O2/ifbranches.expected +++ b/src/test/correct/ifbranches/clang_O2/ifbranches.expected @@ -19,12 +19,12 @@ function {:extern} {:bvbuiltin "bvcomp"} bvcomp1(bv1, bv1) returns (bv1); function {:extern} {:bvbuiltin "bvcomp"} bvcomp32(bv32, bv32) returns (bv1); function {:extern} {:bvbuiltin "bvcomp"} bvcomp33(bv33, bv33) returns (bv1); function {:extern} {:bvbuiltin "bvnot"} bvnot1(bv1) returns (bv1); -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} {:bvbuiltin "sign_extend 1"} sign_extend1_32(bv32) returns (bv33); @@ -32,12 +32,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1848bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1849bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1850bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1851bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1848bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 
69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -45,8 +42,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -64,18 +61,12 @@ procedure main(); requires (Gamma_R0 == true); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1848bv64) == 1bv8); - free requires (memory_load8_le(mem, 1849bv64) == 0bv8); - free requires (memory_load8_le(mem, 1850bv64) == 2bv8); - free requires (memory_load8_le(mem, 1851bv64) == 0bv8); + free requires (memory_load32_le(mem, 1848bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1848bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1849bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1850bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1851bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1848bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/ifbranches/clang_O2/ifbranches_gtirb.expected b/src/test/correct/ifbranches/clang_O2/ifbranches_gtirb.expected index 3ce882dec..59e353194 100644 --- a/src/test/correct/ifbranches/clang_O2/ifbranches_gtirb.expected +++ b/src/test/correct/ifbranches/clang_O2/ifbranches_gtirb.expected @@ -18,24 +18,21 @@ function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); function {:extern} {:bvbuiltin "bvcomp"} bvcomp32(bv32, bv32) returns (bv1); function {:extern} {:bvbuiltin "bvcomp"} bvcomp33(bv33, bv33) returns (bv1); function {:extern} {:bvbuiltin "bvnot"} bvnot1(bv1) returns (bv1); -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (bv33); function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1848bv64) == 1bv8); - free ensures 
(memory_load8_le(mem, 1849bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1850bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1851bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1848bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -43,8 +40,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -62,18 +59,12 @@ procedure main(); requires (Gamma_R0 == true); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1848bv64) == 1bv8); - free requires (memory_load8_le(mem, 1849bv64) == 0bv8); - free requires (memory_load8_le(mem, 1850bv64) == 2bv8); - free requires (memory_load8_le(mem, 1851bv64) == 0bv8); + free requires (memory_load32_le(mem, 1848bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1848bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1849bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1850bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1851bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1848bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/ifbranches/gcc/ifbranches.expected b/src/test/correct/ifbranches/gcc/ifbranches.expected index 248272d12..1488af65e 100644 --- a/src/test/correct/ifbranches/gcc/ifbranches.expected +++ b/src/test/correct/ifbranches/gcc/ifbranches.expected @@ -43,10 +43,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -60,12 +56,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1904bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1905bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1906bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1907bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1904bv64) == 131073bv32); free ensures (memory_load64_le(mem, 
69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -73,8 +66,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -92,20 +85,14 @@ procedure main(); requires (Gamma_R0 == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1904bv64) == 1bv8); - free requires (memory_load8_le(mem, 1905bv64) == 0bv8); - free requires (memory_load8_le(mem, 1906bv64) == 2bv8); - free requires (memory_load8_le(mem, 1907bv64) == 0bv8); + free requires (memory_load32_le(mem, 1904bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1904bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1905bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1906bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1907bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1904bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/ifbranches/gcc/ifbranches_gtirb.expected b/src/test/correct/ifbranches/gcc/ifbranches_gtirb.expected index 353031041..46089e34d 100644 --- a/src/test/correct/ifbranches/gcc/ifbranches_gtirb.expected +++ b/src/test/correct/ifbranches/gcc/ifbranches_gtirb.expected @@ -42,10 +42,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -58,12 +54,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1904bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1905bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1906bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1907bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1904bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -71,8 +64,8 @@ procedure {:extern} rely(); 
procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -90,20 +83,14 @@ procedure main(); requires (Gamma_R0 == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1904bv64) == 1bv8); - free requires (memory_load8_le(mem, 1905bv64) == 0bv8); - free requires (memory_load8_le(mem, 1906bv64) == 2bv8); - free requires (memory_load8_le(mem, 1907bv64) == 0bv8); + free requires (memory_load32_le(mem, 1904bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1904bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1905bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1906bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1907bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1904bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/ifbranches/gcc_O2/ifbranches.expected b/src/test/correct/ifbranches/gcc_O2/ifbranches.expected index 6745e11ac..1fd37da61 100644 --- a/src/test/correct/ifbranches/gcc_O2/ifbranches.expected +++ b/src/test/correct/ifbranches/gcc_O2/ifbranches.expected @@ -17,12 +17,12 @@ function {:extern} {:bvbuiltin "bvcomp"} bvcomp1(bv1, bv1) returns (bv1); function {:extern} {:bvbuiltin "bvcomp"} bvcomp32(bv32, bv32) returns (bv1); function {:extern} {:bvbuiltin "bvcomp"} bvcomp33(bv33, bv33) returns (bv1); function {:extern} {:bvbuiltin "bvnot"} bvnot1(bv1) returns (bv1); -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} {:bvbuiltin "sign_extend 1"} sign_extend1_32(bv32) returns (bv33); @@ -30,12 +30,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free 
ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -43,8 +40,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -62,18 +59,12 @@ procedure main(); requires (Gamma_R0 == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/ifbranches/gcc_O2/ifbranches_gtirb.expected b/src/test/correct/ifbranches/gcc_O2/ifbranches_gtirb.expected index a2b4c6448..54cf79dd7 100644 --- a/src/test/correct/ifbranches/gcc_O2/ifbranches_gtirb.expected +++ b/src/test/correct/ifbranches/gcc_O2/ifbranches_gtirb.expected @@ -16,24 +16,21 @@ function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); function {:extern} {:bvbuiltin "bvcomp"} bvcomp32(bv32, bv32) returns (bv1); function {:extern} {:bvbuiltin "bvcomp"} bvcomp33(bv33, bv33) returns (bv1); function {:extern} {:bvbuiltin "bvnot"} bvnot1(bv1) returns (bv1); -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} 
{:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (bv33); function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -41,8 +38,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -60,18 +57,12 @@ procedure main(); requires (Gamma_R0 == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/ifglobal/clang/ifglobal.expected b/src/test/correct/ifglobal/clang/ifglobal.expected index 04908c696..a049e2d90 100644 --- a/src/test/correct/ifglobal/clang/ifglobal.expected +++ b/src/test/correct/ifglobal/clang/ifglobal.expected @@ -20,7 +20,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -47,10 +47,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ 
-60,12 +56,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -73,8 +66,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -91,20 +84,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, R9, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1892bv64) == 1bv8); - free requires (memory_load8_le(mem, 1893bv64) == 0bv8); - free requires (memory_load8_le(mem, 1894bv64) == 2bv8); - free requires (memory_load8_le(mem, 1895bv64) == 0bv8); + free requires (memory_load32_le(mem, 1892bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/ifglobal/clang/ifglobal_gtirb.expected b/src/test/correct/ifglobal/clang/ifglobal_gtirb.expected index dcf8b5a82..2e6edff1d 100644 --- a/src/test/correct/ifglobal/clang/ifglobal_gtirb.expected +++ b/src/test/correct/ifglobal/clang/ifglobal_gtirb.expected @@ -20,7 +20,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -46,10 +46,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] 
-} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -58,12 +54,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -71,8 +64,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -89,20 +82,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, R9, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1892bv64) == 1bv8); - free requires (memory_load8_le(mem, 1893bv64) == 0bv8); - free requires (memory_load8_le(mem, 1894bv64) == 2bv8); - free requires (memory_load8_le(mem, 1895bv64) == 0bv8); + free requires (memory_load32_le(mem, 1892bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/ifglobal/clang_O2/ifglobal.expected b/src/test/correct/ifglobal/clang_O2/ifglobal.expected index 3841ba288..9c75c68c6 100644 --- a/src/test/correct/ifglobal/clang_O2/ifglobal.expected +++ b/src/test/correct/ifglobal/clang_O2/ifglobal.expected @@ -8,7 +8,7 @@ var {:extern} R9: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -30,10 +30,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 
4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -41,12 +37,9 @@ function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -54,8 +47,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -72,18 +65,12 @@ procedure main(); modifies Gamma_R0, Gamma_R8, Gamma_R9, Gamma_mem, R0, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1868bv64) == 1bv8); - free requires (memory_load8_le(mem, 1869bv64) == 0bv8); - free requires (memory_load8_le(mem, 1870bv64) == 2bv8); - free requires (memory_load8_le(mem, 1871bv64) == 0bv8); + free requires (memory_load32_le(mem, 1868bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/ifglobal/clang_O2/ifglobal_gtirb.expected b/src/test/correct/ifglobal/clang_O2/ifglobal_gtirb.expected index ff90bf950..4d0aa9470 100644 --- a/src/test/correct/ifglobal/clang_O2/ifglobal_gtirb.expected +++ b/src/test/correct/ifglobal/clang_O2/ifglobal_gtirb.expected @@ -8,7 +8,7 @@ var {:extern} R9: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ 
(memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -40,12 +36,9 @@ function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -53,8 +46,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -71,18 +64,12 @@ procedure main(); modifies Gamma_R0, Gamma_R8, Gamma_R9, Gamma_mem, R0, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1868bv64) == 1bv8); - free requires (memory_load8_le(mem, 1869bv64) == 0bv8); - free requires (memory_load8_le(mem, 1870bv64) == 2bv8); - free requires (memory_load8_le(mem, 1871bv64) == 0bv8); + free requires (memory_load32_le(mem, 1868bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/ifglobal/clang_pic/ifglobal.expected b/src/test/correct/ifglobal/clang_pic/ifglobal.expected index 6be997683..2c79a79e3 100644 --- a/src/test/correct/ifglobal/clang_pic/ifglobal.expected +++ b/src/test/correct/ifglobal/clang_pic/ifglobal.expected @@ -20,7 +20,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -51,10 +51,6 @@ function {:extern} 
memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -64,22 +60,19 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -96,26 +89,20 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, R9, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1964bv64) == 1bv8); - free requires (memory_load8_le(mem, 1965bv64) == 0bv8); - free requires (memory_load8_le(mem, 1966bv64) == 2bv8); - free requires (memory_load8_le(mem, 1967bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1964bv64) == 131073bv32); free requires (memory_load64_le(mem, 69056bv64) == 1872bv64); free requires (memory_load64_le(mem, 69064bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 
69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/ifglobal/clang_pic/ifglobal_gtirb.expected b/src/test/correct/ifglobal/clang_pic/ifglobal_gtirb.expected index fe3a8d399..3491452f3 100644 --- a/src/test/correct/ifglobal/clang_pic/ifglobal_gtirb.expected +++ b/src/test/correct/ifglobal/clang_pic/ifglobal_gtirb.expected @@ -20,7 +20,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -50,10 +50,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -62,22 +58,19 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -94,26 +87,20 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, R9, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1964bv64) == 1bv8); - free requires (memory_load8_le(mem, 1965bv64) == 0bv8); - free requires (memory_load8_le(mem, 1966bv64) == 2bv8); - free requires (memory_load8_le(mem, 1967bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); - free requires 
(memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1964bv64) == 131073bv32); free requires (memory_load64_le(mem, 69056bv64) == 1872bv64); free requires (memory_load64_le(mem, 69064bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/ifglobal/gcc/ifglobal.expected b/src/test/correct/ifglobal/gcc/ifglobal.expected index 858159134..ef4f35400 100644 --- a/src/test/correct/ifglobal/gcc/ifglobal.expected +++ b/src/test/correct/ifglobal/gcc/ifglobal.expected @@ -14,7 +14,7 @@ var {:extern} ZF: bv1; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -41,10 +41,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -54,12 +50,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1876bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1877bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1878bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1879bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1876bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -67,8 +60,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures 
(mem == old(mem)); implementation {:extern} rely_transitive() { @@ -85,18 +78,12 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R1, Gamma_VF, Gamma_ZF, Gamma_mem, NF, R0, R1, VF, ZF, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1876bv64) == 1bv8); - free requires (memory_load8_le(mem, 1877bv64) == 0bv8); - free requires (memory_load8_le(mem, 1878bv64) == 2bv8); - free requires (memory_load8_le(mem, 1879bv64) == 0bv8); + free requires (memory_load32_le(mem, 1876bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1876bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1877bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1878bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1879bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1876bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/ifglobal/gcc/ifglobal_gtirb.expected b/src/test/correct/ifglobal/gcc/ifglobal_gtirb.expected index 87745aec2..0b7d7767d 100644 --- a/src/test/correct/ifglobal/gcc/ifglobal_gtirb.expected +++ b/src/test/correct/ifglobal/gcc/ifglobal_gtirb.expected @@ -14,7 +14,7 @@ var {:extern} ZF: bv1; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -40,10 +40,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -52,12 +48,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1876bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1877bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1878bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1879bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1876bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -65,8 +58,8 @@ procedure {:extern} rely(); procedure {:extern} 
rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -83,18 +76,12 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R1, Gamma_VF, Gamma_ZF, Gamma_mem, NF, R0, R1, VF, ZF, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1876bv64) == 1bv8); - free requires (memory_load8_le(mem, 1877bv64) == 0bv8); - free requires (memory_load8_le(mem, 1878bv64) == 2bv8); - free requires (memory_load8_le(mem, 1879bv64) == 0bv8); + free requires (memory_load32_le(mem, 1876bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1876bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1877bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1878bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1879bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1876bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/ifglobal/gcc_O2/ifglobal.expected b/src/test/correct/ifglobal/gcc_O2/ifglobal.expected index 93f8cf349..1e0ea5e01 100644 --- a/src/test/correct/ifglobal/gcc_O2/ifglobal.expected +++ b/src/test/correct/ifglobal/gcc_O2/ifglobal.expected @@ -6,7 +6,7 @@ var {:extern} R1: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -40,12 +36,9 @@ function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures 
(memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -53,8 +46,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -71,18 +64,12 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/ifglobal/gcc_O2/ifglobal_gtirb.expected b/src/test/correct/ifglobal/gcc_O2/ifglobal_gtirb.expected index f7ef66d68..0b576dc6c 100644 --- a/src/test/correct/ifglobal/gcc_O2/ifglobal_gtirb.expected +++ b/src/test/correct/ifglobal/gcc_O2/ifglobal_gtirb.expected @@ -6,7 +6,7 @@ var {:extern} R1: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -27,10 +27,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -38,12 +34,9 @@ function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free 
ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -51,8 +44,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -69,18 +62,12 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/ifglobal/gcc_pic/ifglobal.expected b/src/test/correct/ifglobal/gcc_pic/ifglobal.expected index 03fc8c3d9..99d76739e 100644 --- a/src/test/correct/ifglobal/gcc_pic/ifglobal.expected +++ b/src/test/correct/ifglobal/gcc_pic/ifglobal.expected @@ -14,7 +14,7 @@ var {:extern} ZF: bv1; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -45,10 +45,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -58,22 +54,19 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1940bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1941bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1942bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1943bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 
69616bv64) == 1876bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1940bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -90,24 +83,18 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R1, Gamma_VF, Gamma_ZF, Gamma_mem, NF, R0, R1, VF, ZF, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1940bv64) == 1bv8); - free requires (memory_load8_le(mem, 1941bv64) == 0bv8); - free requires (memory_load8_le(mem, 1942bv64) == 2bv8); - free requires (memory_load8_le(mem, 1943bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1940bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); - free ensures (memory_load8_le(mem, 1940bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1941bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1942bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1943bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); + free ensures (memory_load32_le(mem, 1940bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/ifglobal/gcc_pic/ifglobal_gtirb.expected b/src/test/correct/ifglobal/gcc_pic/ifglobal_gtirb.expected index d5ed782bd..599a59bca 100644 --- a/src/test/correct/ifglobal/gcc_pic/ifglobal_gtirb.expected +++ b/src/test/correct/ifglobal/gcc_pic/ifglobal_gtirb.expected @@ -14,7 +14,7 @@ var {:extern} ZF: bv1; var {:extern} mem: [bv64]bv8; const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $x_addr) then true else false) } @@ -44,10 +44,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ 
memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -56,22 +52,19 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1940bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1941bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1942bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1943bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1940bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -88,24 +81,18 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R1, Gamma_VF, Gamma_ZF, Gamma_mem, NF, R0, R1, VF, ZF, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1940bv64) == 1bv8); - free requires (memory_load8_le(mem, 1941bv64) == 0bv8); - free requires (memory_load8_le(mem, 1942bv64) == 2bv8); - free requires (memory_load8_le(mem, 1943bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1940bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); - free ensures (memory_load8_le(mem, 1940bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1941bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1942bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1943bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); + free ensures (memory_load32_le(mem, 1940bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures 
(memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/indirect_call/clang/indirect_call_gtirb.expected b/src/test/correct/indirect_call/clang/indirect_call_gtirb.expected index 9824811ab..2afb32f54 100644 --- a/src/test/correct/indirect_call/clang/indirect_call_gtirb.expected +++ b/src/test/correct/indirect_call/clang/indirect_call_gtirb.expected @@ -18,7 +18,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 1996bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -62,8 +62,8 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load8_le(mem, 1996bv64) == 1bv8); free ensures (memory_load8_le(mem, 1997bv64) == 0bv8); free ensures (memory_load8_le(mem, 1998bv64) == 2bv8); @@ -84,8 +84,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/indirect_call/clang_pic/indirect_call_gtirb.expected b/src/test/correct/indirect_call/clang_pic/indirect_call_gtirb.expected index cd39145d7..85aa00a9a 100644 --- a/src/test/correct/indirect_call/clang_pic/indirect_call_gtirb.expected +++ b/src/test/correct/indirect_call/clang_pic/indirect_call_gtirb.expected @@ -18,7 +18,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2060bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -62,8 +62,8 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load8_le(mem, 2060bv64) == 1bv8); free ensures (memory_load8_le(mem, 2061bv64) == 0bv8); free ensures (memory_load8_le(mem, 2062bv64) == 2bv8); @@ -77,16 +77,16 @@ procedure {:extern} rely(); free ensures (memory_load8_le(mem, 2105bv64) == 58bv8); free ensures (memory_load8_le(mem, 2106bv64) == 10bv8); free ensures (memory_load8_le(mem, 2107bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 1940bv64); - free ensures (memory_load64_le(mem, 69680bv64) == 69680bv64); free ensures (memory_load64_le(mem, 69056bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 1940bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1968bv64); + free ensures (memory_load64_le(mem, 69680bv64) == 69680bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -116,11 +116,11 @@ procedure main(); free requires 
(memory_load8_le(mem, 2105bv64) == 58bv8); free requires (memory_load8_le(mem, 2106bv64) == 10bv8); free requires (memory_load8_le(mem, 2107bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 1940bv64); - free requires (memory_load64_le(mem, 69680bv64) == 69680bv64); free requires (memory_load64_le(mem, 69056bv64) == 1936bv64); free requires (memory_load64_le(mem, 69064bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69576bv64) == 1940bv64); free requires (memory_load64_le(mem, 69592bv64) == 1968bv64); + free requires (memory_load64_le(mem, 69680bv64) == 69680bv64); free ensures (Gamma_R29 == old(Gamma_R29)); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); @@ -138,11 +138,11 @@ procedure main(); free ensures (memory_load8_le(mem, 2105bv64) == 58bv8); free ensures (memory_load8_le(mem, 2106bv64) == 10bv8); free ensures (memory_load8_le(mem, 2107bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 1940bv64); - free ensures (memory_load64_le(mem, 69680bv64) == 69680bv64); free ensures (memory_load64_le(mem, 69056bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 1940bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1968bv64); + free ensures (memory_load64_le(mem, 69680bv64) == 69680bv64); implementation main() { @@ -208,11 +208,11 @@ procedure FUN_650(); free requires (memory_load8_le(mem, 2105bv64) == 58bv8); free requires (memory_load8_le(mem, 2106bv64) == 10bv8); free requires (memory_load8_le(mem, 2107bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 1940bv64); - free requires (memory_load64_le(mem, 69680bv64) == 69680bv64); free requires (memory_load64_le(mem, 69056bv64) == 1936bv64); free requires (memory_load64_le(mem, 69064bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69576bv64) == 1940bv64); free requires (memory_load64_le(mem, 69592bv64) == 1968bv64); + free requires (memory_load64_le(mem, 69680bv64) == 69680bv64); free ensures (memory_load8_le(mem, 2060bv64) == 1bv8); free ensures (memory_load8_le(mem, 2061bv64) == 0bv8); free ensures (memory_load8_le(mem, 2062bv64) == 2bv8); @@ -226,11 +226,11 @@ procedure FUN_650(); free ensures (memory_load8_le(mem, 2105bv64) == 58bv8); free ensures (memory_load8_le(mem, 2106bv64) == 10bv8); free ensures (memory_load8_le(mem, 2107bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 1940bv64); - free ensures (memory_load64_le(mem, 69680bv64) == 69680bv64); free ensures (memory_load64_le(mem, 69056bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 1940bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1968bv64); + free ensures (memory_load64_le(mem, 69680bv64) == 69680bv64); implementation FUN_650() { @@ -259,11 +259,11 @@ procedure greet(); free requires (memory_load8_le(mem, 2105bv64) == 58bv8); free requires (memory_load8_le(mem, 2106bv64) == 10bv8); free requires (memory_load8_le(mem, 2107bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 1940bv64); - free requires (memory_load64_le(mem, 69680bv64) == 69680bv64); free requires (memory_load64_le(mem, 69056bv64) == 1936bv64); free requires (memory_load64_le(mem, 69064bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69576bv64) == 1940bv64); free requires (memory_load64_le(mem, 69592bv64) == 1968bv64); + free requires (memory_load64_le(mem, 69680bv64) == 69680bv64); free ensures 
(Gamma_R29 == old(Gamma_R29)); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); @@ -281,11 +281,11 @@ procedure greet(); free ensures (memory_load8_le(mem, 2105bv64) == 58bv8); free ensures (memory_load8_le(mem, 2106bv64) == 10bv8); free ensures (memory_load8_le(mem, 2107bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 1940bv64); - free ensures (memory_load64_le(mem, 69680bv64) == 69680bv64); free ensures (memory_load64_le(mem, 69056bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 1940bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1968bv64); + free ensures (memory_load64_le(mem, 69680bv64) == 69680bv64); implementation greet() { @@ -330,11 +330,11 @@ procedure printf(); free requires (memory_load8_le(mem, 2105bv64) == 58bv8); free requires (memory_load8_le(mem, 2106bv64) == 10bv8); free requires (memory_load8_le(mem, 2107bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 1940bv64); - free requires (memory_load64_le(mem, 69680bv64) == 69680bv64); free requires (memory_load64_le(mem, 69056bv64) == 1936bv64); free requires (memory_load64_le(mem, 69064bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69576bv64) == 1940bv64); free requires (memory_load64_le(mem, 69592bv64) == 1968bv64); + free requires (memory_load64_le(mem, 69680bv64) == 69680bv64); free ensures (memory_load8_le(mem, 2060bv64) == 1bv8); free ensures (memory_load8_le(mem, 2061bv64) == 0bv8); free ensures (memory_load8_le(mem, 2062bv64) == 2bv8); @@ -348,9 +348,9 @@ procedure printf(); free ensures (memory_load8_le(mem, 2105bv64) == 58bv8); free ensures (memory_load8_le(mem, 2106bv64) == 10bv8); free ensures (memory_load8_le(mem, 2107bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 1940bv64); - free ensures (memory_load64_le(mem, 69680bv64) == 69680bv64); free ensures (memory_load64_le(mem, 69056bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 1940bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1968bv64); + free ensures (memory_load64_le(mem, 69680bv64) == 69680bv64); diff --git a/src/test/correct/indirect_call/gcc/indirect_call_gtirb.expected b/src/test/correct/indirect_call/gcc/indirect_call_gtirb.expected index 1587a7a14..1b2be3e60 100644 --- a/src/test/correct/indirect_call/gcc/indirect_call_gtirb.expected +++ b/src/test/correct/indirect_call/gcc/indirect_call_gtirb.expected @@ -16,7 +16,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 1984bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -43,8 +43,8 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 1984bv64) == 131073bv64); free ensures (memory_load64_le(mem, 1992bv64) == 8583909746840200520bv64); free ensures (memory_load64_le(mem, 2000bv64) == 143418749551bv64); @@ -62,8 +62,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation 
{:extern} rely_transitive() { diff --git a/src/test/correct/indirect_call/gcc_pic/indirect_call_gtirb.expected b/src/test/correct/indirect_call/gcc_pic/indirect_call_gtirb.expected index 6fae6b6ba..01dd235a3 100644 --- a/src/test/correct/indirect_call/gcc_pic/indirect_call_gtirb.expected +++ b/src/test/correct/indirect_call/gcc_pic/indirect_call_gtirb.expected @@ -16,7 +16,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2048bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -43,8 +43,8 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2048bv64) == 131073bv64); free ensures (memory_load64_le(mem, 2056bv64) == 8583909746840200520bv64); free ensures (memory_load64_le(mem, 2064bv64) == 143418749551bv64); @@ -55,16 +55,16 @@ procedure {:extern} rely(); free ensures (memory_load8_le(mem, 2097bv64) == 121bv8); free ensures (memory_load8_le(mem, 2098bv64) == 58bv8); free ensures (memory_load8_le(mem, 2099bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1972bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 1940bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69000bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69008bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 1940bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1972bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -89,11 +89,11 @@ procedure greet(); free requires (memory_load8_le(mem, 2097bv64) == 121bv8); free requires (memory_load8_le(mem, 2098bv64) == 58bv8); free requires (memory_load8_le(mem, 2099bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1972bv64); - free requires (memory_load64_le(mem, 69600bv64) == 1940bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1856bv64); free requires (memory_load64_le(mem, 69000bv64) == 1936bv64); + free requires (memory_load64_le(mem, 69008bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69600bv64) == 1940bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1972bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R29 == old(Gamma_R29)); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); @@ -108,11 +108,11 @@ procedure greet(); free ensures (memory_load8_le(mem, 2097bv64) == 121bv8); free ensures (memory_load8_le(mem, 2098bv64) == 58bv8); free ensures (memory_load8_le(mem, 2099bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1972bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 1940bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69000bv64) == 1936bv64); + 
free ensures (memory_load64_le(mem, 69008bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 1940bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1972bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation greet() { @@ -155,11 +155,11 @@ procedure FUN_650(); free requires (memory_load8_le(mem, 2097bv64) == 121bv8); free requires (memory_load8_le(mem, 2098bv64) == 58bv8); free requires (memory_load8_le(mem, 2099bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1972bv64); - free requires (memory_load64_le(mem, 69600bv64) == 1940bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1856bv64); free requires (memory_load64_le(mem, 69000bv64) == 1936bv64); + free requires (memory_load64_le(mem, 69008bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69600bv64) == 1940bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1972bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 2048bv64) == 131073bv64); free ensures (memory_load64_le(mem, 2056bv64) == 8583909746840200520bv64); free ensures (memory_load64_le(mem, 2064bv64) == 143418749551bv64); @@ -170,11 +170,11 @@ procedure FUN_650(); free ensures (memory_load8_le(mem, 2097bv64) == 121bv8); free ensures (memory_load8_le(mem, 2098bv64) == 58bv8); free ensures (memory_load8_le(mem, 2099bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1972bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 1940bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69000bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69008bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 1940bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1972bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation FUN_650() { @@ -202,11 +202,11 @@ procedure main(); free requires (memory_load8_le(mem, 2097bv64) == 121bv8); free requires (memory_load8_le(mem, 2098bv64) == 58bv8); free requires (memory_load8_le(mem, 2099bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1972bv64); - free requires (memory_load64_le(mem, 69600bv64) == 1940bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1856bv64); free requires (memory_load64_le(mem, 69000bv64) == 1936bv64); + free requires (memory_load64_le(mem, 69008bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69600bv64) == 1940bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1972bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R29 == old(Gamma_R29)); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); @@ -221,11 +221,11 @@ procedure main(); free ensures (memory_load8_le(mem, 2097bv64) == 121bv8); free ensures (memory_load8_le(mem, 2098bv64) == 58bv8); free ensures (memory_load8_le(mem, 2099bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1972bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 1940bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69000bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69008bv64) 
== 1856bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 1940bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1972bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { @@ -279,11 +279,11 @@ procedure puts(); free requires (memory_load8_le(mem, 2097bv64) == 121bv8); free requires (memory_load8_le(mem, 2098bv64) == 58bv8); free requires (memory_load8_le(mem, 2099bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1972bv64); - free requires (memory_load64_le(mem, 69600bv64) == 1940bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1856bv64); free requires (memory_load64_le(mem, 69000bv64) == 1936bv64); + free requires (memory_load64_le(mem, 69008bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69600bv64) == 1940bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1972bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 2048bv64) == 131073bv64); free ensures (memory_load64_le(mem, 2056bv64) == 8583909746840200520bv64); free ensures (memory_load64_le(mem, 2064bv64) == 143418749551bv64); @@ -294,9 +294,9 @@ procedure puts(); free ensures (memory_load8_le(mem, 2097bv64) == 121bv8); free ensures (memory_load8_le(mem, 2098bv64) == 58bv8); free ensures (memory_load8_le(mem, 2099bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1972bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 1940bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69000bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69008bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 1940bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1972bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); diff --git a/src/test/correct/initialisation/clang/initialisation.expected b/src/test/correct/initialisation/clang/initialisation.expected index 0af0f30d2..b6d670468 100644 --- a/src/test/correct/initialisation/clang/initialisation.expected +++ b/src/test/correct/initialisation/clang/initialisation.expected @@ -26,7 +26,7 @@ const {:extern} $y_addr: bv64; axiom ($y_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -90,12 +90,9 @@ function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1912bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1913bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1914bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1915bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1912bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -103,8 +100,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures 
(Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -124,10 +121,7 @@ procedure main(); free requires (memory_load64_le(mem, 69680bv64) == 416611827717bv64); free requires (memory_load64_le(mem, 69688bv64) == 68719476735bv64); free requires (memory_load64_le(mem, 69696bv64) == 8589934593bv64); - free requires (memory_load8_le(mem, 1912bv64) == 1bv8); - free requires (memory_load8_le(mem, 1913bv64) == 0bv8); - free requires (memory_load8_le(mem, 1914bv64) == 2bv8); - free requires (memory_load8_le(mem, 1915bv64) == 0bv8); + free requires (memory_load32_le(mem, 1912bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -135,10 +129,7 @@ procedure main(); ensures (memory_load32_le(mem, $x_addr) == 6bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 4bv64)) == 4bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 0bv64)) == 1bv32); - free ensures (memory_load8_le(mem, 1912bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1913bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1914bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1915bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1912bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/initialisation/clang/initialisation_gtirb.expected b/src/test/correct/initialisation/clang/initialisation_gtirb.expected index 3f5fbc23c..d27251515 100644 --- a/src/test/correct/initialisation/clang/initialisation_gtirb.expected +++ b/src/test/correct/initialisation/clang/initialisation_gtirb.expected @@ -26,7 +26,7 @@ const {:extern} $y_addr: bv64; axiom ($y_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -90,12 +90,9 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1912bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1913bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1914bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1915bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1912bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -103,8 +100,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -124,10 +121,7 @@ procedure main(); free requires (memory_load64_le(mem, 69680bv64) == 416611827717bv64); free requires (memory_load64_le(mem, 69688bv64) == 68719476735bv64); free requires (memory_load64_le(mem, 69696bv64) == 8589934593bv64); - free requires (memory_load8_le(mem, 1912bv64) == 1bv8); - free requires 
(memory_load8_le(mem, 1913bv64) == 0bv8); - free requires (memory_load8_le(mem, 1914bv64) == 2bv8); - free requires (memory_load8_le(mem, 1915bv64) == 0bv8); + free requires (memory_load32_le(mem, 1912bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -135,10 +129,7 @@ procedure main(); ensures (memory_load32_le(mem, $x_addr) == 6bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 4bv64)) == 4bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 0bv64)) == 1bv32); - free ensures (memory_load8_le(mem, 1912bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1913bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1914bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1915bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1912bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/initialisation/clang_O2/initialisation.expected b/src/test/correct/initialisation/clang_O2/initialisation.expected index 13d304de8..3562cf315 100644 --- a/src/test/correct/initialisation/clang_O2/initialisation.expected +++ b/src/test/correct/initialisation/clang_O2/initialisation.expected @@ -26,7 +26,7 @@ const {:extern} $y_addr: bv64; axiom ($y_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -84,12 +84,9 @@ function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1908bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1909bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1910bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1911bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1908bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -97,8 +94,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -118,10 +115,7 @@ procedure main(); free requires (memory_load64_le(mem, 69680bv64) == 416611827717bv64); free requires (memory_load64_le(mem, 69688bv64) == 68719476735bv64); free requires (memory_load64_le(mem, 69696bv64) == 8589934593bv64); - free requires (memory_load8_le(mem, 1908bv64) == 1bv8); - free requires (memory_load8_le(mem, 1909bv64) == 0bv8); - free requires (memory_load8_le(mem, 1910bv64) == 2bv8); - free requires (memory_load8_le(mem, 1911bv64) == 0bv8); + free requires (memory_load32_le(mem, 1908bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -129,10 +123,7 @@ procedure main(); 
ensures (memory_load32_le(mem, $x_addr) == 6bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 4bv64)) == 4bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 0bv64)) == 1bv32); - free ensures (memory_load8_le(mem, 1908bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1909bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1910bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1911bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1908bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/initialisation/clang_O2/initialisation_gtirb.expected b/src/test/correct/initialisation/clang_O2/initialisation_gtirb.expected index a9e116b67..3297b068d 100644 --- a/src/test/correct/initialisation/clang_O2/initialisation_gtirb.expected +++ b/src/test/correct/initialisation/clang_O2/initialisation_gtirb.expected @@ -26,7 +26,7 @@ const {:extern} $y_addr: bv64; axiom ($y_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -84,12 +84,9 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1908bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1909bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1910bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1911bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1908bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -97,8 +94,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -118,10 +115,7 @@ procedure main(); free requires (memory_load64_le(mem, 69680bv64) == 416611827717bv64); free requires (memory_load64_le(mem, 69688bv64) == 68719476735bv64); free requires (memory_load64_le(mem, 69696bv64) == 8589934593bv64); - free requires (memory_load8_le(mem, 1908bv64) == 1bv8); - free requires (memory_load8_le(mem, 1909bv64) == 0bv8); - free requires (memory_load8_le(mem, 1910bv64) == 2bv8); - free requires (memory_load8_le(mem, 1911bv64) == 0bv8); + free requires (memory_load32_le(mem, 1908bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -129,10 +123,7 @@ procedure main(); ensures (memory_load32_le(mem, $x_addr) == 6bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 4bv64)) == 4bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 0bv64)) == 1bv32); - free ensures (memory_load8_le(mem, 1908bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1909bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1910bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1911bv64) == 0bv8); + free 
ensures (memory_load32_le(mem, 1908bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/initialisation/clang_pic/initialisation.expected b/src/test/correct/initialisation/clang_pic/initialisation.expected index dadd6b3d7..d38e9a7ca 100644 --- a/src/test/correct/initialisation/clang_pic/initialisation.expected +++ b/src/test/correct/initialisation/clang_pic/initialisation.expected @@ -24,7 +24,7 @@ const {:extern} $y_addr: bv64; axiom ($y_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -88,25 +88,22 @@ function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2048bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2049bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2050bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2051bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2048bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69032bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69040bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69552bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69560bv64) == 69680bv64); free ensures (memory_load64_le(mem, 69576bv64) == 69696bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69552bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69040bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1940bv64); - free ensures (memory_load64_le(mem, 69032bv64) == 1936bv64); - free ensures (memory_load64_le(mem, 69560bv64) == 69680bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -126,33 +123,27 @@ procedure main(); free requires (memory_load64_le(mem, 69680bv64) == 416611827717bv64); free requires (memory_load64_le(mem, 69688bv64) == 68719476735bv64); free requires (memory_load64_le(mem, 69696bv64) == 8589934593bv64); - free requires (memory_load8_le(mem, 2048bv64) == 1bv8); - free requires (memory_load8_le(mem, 2049bv64) == 0bv8); - free requires (memory_load8_le(mem, 2050bv64) == 2bv8); - free requires (memory_load8_le(mem, 2051bv64) == 0bv8); + free requires (memory_load32_le(mem, 2048bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69032bv64) == 1936bv64); + free requires (memory_load64_le(mem, 69040bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69552bv64) == 69688bv64); + free requires (memory_load64_le(mem, 69560bv64) == 69680bv64); free requires (memory_load64_le(mem, 69576bv64) == 69696bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69552bv64) 
== 69688bv64); - free requires (memory_load64_le(mem, 69040bv64) == 1856bv64); free requires (memory_load64_le(mem, 69592bv64) == 1940bv64); - free requires (memory_load64_le(mem, 69032bv64) == 1936bv64); - free requires (memory_load64_le(mem, 69560bv64) == 69680bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (memory_load32_le(mem, $x_addr) == 6bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 4bv64)) == 4bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 0bv64)) == 1bv32); - free ensures (memory_load8_le(mem, 2048bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2049bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2050bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2051bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2048bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69032bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69040bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69552bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69560bv64) == 69680bv64); free ensures (memory_load64_le(mem, 69576bv64) == 69696bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69552bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69040bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1940bv64); - free ensures (memory_load64_le(mem, 69032bv64) == 1936bv64); - free ensures (memory_load64_le(mem, 69560bv64) == 69680bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/initialisation/clang_pic/initialisation_gtirb.expected b/src/test/correct/initialisation/clang_pic/initialisation_gtirb.expected index b27c8c0fc..874733c01 100644 --- a/src/test/correct/initialisation/clang_pic/initialisation_gtirb.expected +++ b/src/test/correct/initialisation/clang_pic/initialisation_gtirb.expected @@ -24,7 +24,7 @@ const {:extern} $y_addr: bv64; axiom ($y_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -88,25 +88,22 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2048bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2049bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2050bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2051bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2048bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69032bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69040bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69552bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69560bv64) == 69680bv64); free ensures (memory_load64_le(mem, 69576bv64) == 69696bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69552bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69040bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69592bv64) == 
1940bv64); - free ensures (memory_load64_le(mem, 69032bv64) == 1936bv64); - free ensures (memory_load64_le(mem, 69560bv64) == 69680bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -126,33 +123,27 @@ procedure main(); free requires (memory_load64_le(mem, 69680bv64) == 416611827717bv64); free requires (memory_load64_le(mem, 69688bv64) == 68719476735bv64); free requires (memory_load64_le(mem, 69696bv64) == 8589934593bv64); - free requires (memory_load8_le(mem, 2048bv64) == 1bv8); - free requires (memory_load8_le(mem, 2049bv64) == 0bv8); - free requires (memory_load8_le(mem, 2050bv64) == 2bv8); - free requires (memory_load8_le(mem, 2051bv64) == 0bv8); + free requires (memory_load32_le(mem, 2048bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69032bv64) == 1936bv64); + free requires (memory_load64_le(mem, 69040bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69552bv64) == 69688bv64); + free requires (memory_load64_le(mem, 69560bv64) == 69680bv64); free requires (memory_load64_le(mem, 69576bv64) == 69696bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69552bv64) == 69688bv64); - free requires (memory_load64_le(mem, 69040bv64) == 1856bv64); free requires (memory_load64_le(mem, 69592bv64) == 1940bv64); - free requires (memory_load64_le(mem, 69032bv64) == 1936bv64); - free requires (memory_load64_le(mem, 69560bv64) == 69680bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (memory_load32_le(mem, $x_addr) == 6bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 4bv64)) == 4bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 0bv64)) == 1bv32); - free ensures (memory_load8_le(mem, 2048bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2049bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2050bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2051bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2048bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69032bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69040bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69552bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69560bv64) == 69680bv64); free ensures (memory_load64_le(mem, 69576bv64) == 69696bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69552bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69040bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1940bv64); - free ensures (memory_load64_le(mem, 69032bv64) == 1936bv64); - free ensures (memory_load64_le(mem, 69560bv64) == 69680bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/initialisation/gcc/initialisation.expected b/src/test/correct/initialisation/gcc/initialisation.expected index cec7d3cbd..c1e20ad01 100644 --- a/src/test/correct/initialisation/gcc/initialisation.expected +++ b/src/test/correct/initialisation/gcc/initialisation.expected @@ -12,7 +12,7 @@ const {:extern} $y_addr: bv64; axiom ($y_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); 
-function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -70,12 +70,9 @@ function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1956bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1957bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1958bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1959bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1956bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -83,8 +80,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -104,10 +101,7 @@ procedure main(); free requires (memory_load64_le(mem, 69648bv64) == 416611827717bv64); free requires (memory_load64_le(mem, 69656bv64) == 68719476735bv64); free requires (memory_load64_le(mem, 69664bv64) == 8589934593bv64); - free requires (memory_load8_le(mem, 1956bv64) == 1bv8); - free requires (memory_load8_le(mem, 1957bv64) == 0bv8); - free requires (memory_load8_le(mem, 1958bv64) == 2bv8); - free requires (memory_load8_le(mem, 1959bv64) == 0bv8); + free requires (memory_load32_le(mem, 1956bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -115,10 +109,7 @@ procedure main(); ensures (memory_load32_le(mem, $x_addr) == 6bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 4bv64)) == 4bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 0bv64)) == 1bv32); - free ensures (memory_load8_le(mem, 1956bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1957bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1958bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1959bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1956bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/initialisation/gcc/initialisation_gtirb.expected b/src/test/correct/initialisation/gcc/initialisation_gtirb.expected index 45006ee53..d23427f1b 100644 --- a/src/test/correct/initialisation/gcc/initialisation_gtirb.expected +++ b/src/test/correct/initialisation/gcc/initialisation_gtirb.expected @@ -12,7 +12,7 @@ const {:extern} $y_addr: bv64; axiom ($y_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -70,12 +70,9 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == 
old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1956bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1957bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1958bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1959bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1956bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -83,8 +80,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -104,10 +101,7 @@ procedure main(); free requires (memory_load64_le(mem, 69648bv64) == 416611827717bv64); free requires (memory_load64_le(mem, 69656bv64) == 68719476735bv64); free requires (memory_load64_le(mem, 69664bv64) == 8589934593bv64); - free requires (memory_load8_le(mem, 1956bv64) == 1bv8); - free requires (memory_load8_le(mem, 1957bv64) == 0bv8); - free requires (memory_load8_le(mem, 1958bv64) == 2bv8); - free requires (memory_load8_le(mem, 1959bv64) == 0bv8); + free requires (memory_load32_le(mem, 1956bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -115,10 +109,7 @@ procedure main(); ensures (memory_load32_le(mem, $x_addr) == 6bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 4bv64)) == 4bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 0bv64)) == 1bv32); - free ensures (memory_load8_le(mem, 1956bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1957bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1958bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1959bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1956bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/initialisation/gcc_O2/initialisation.expected b/src/test/correct/initialisation/gcc_O2/initialisation.expected index efb62f192..c9f0e32be 100644 --- a/src/test/correct/initialisation/gcc_O2/initialisation.expected +++ b/src/test/correct/initialisation/gcc_O2/initialisation.expected @@ -22,7 +22,7 @@ const {:extern} $y_addr: bv64; axiom ($y_addr == 69672bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -80,12 +80,9 @@ function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); 
free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -93,8 +90,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -115,10 +112,7 @@ procedure main(); free requires (memory_load64_le(mem, 69656bv64) == 68719476735bv64); free requires (memory_load64_le(mem, 69664bv64) == 8589934593bv64); free requires (memory_load8_le(mem, 69672bv64) == 97bv8); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -126,10 +120,7 @@ procedure main(); ensures (memory_load32_le(mem, $x_addr) == 6bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 4bv64)) == 4bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 0bv64)) == 1bv32); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/initialisation/gcc_O2/initialisation_gtirb.expected b/src/test/correct/initialisation/gcc_O2/initialisation_gtirb.expected index cc20e1b32..aed04a776 100644 --- a/src/test/correct/initialisation/gcc_O2/initialisation_gtirb.expected +++ b/src/test/correct/initialisation/gcc_O2/initialisation_gtirb.expected @@ -22,7 +22,7 @@ const {:extern} $y_addr: bv64; axiom ($y_addr == 69672bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -80,12 +80,9 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -93,8 +90,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -115,10 +112,7 @@ procedure main(); free requires (memory_load64_le(mem, 69656bv64) == 68719476735bv64); free requires 
(memory_load64_le(mem, 69664bv64) == 8589934593bv64); free requires (memory_load8_le(mem, 69672bv64) == 97bv8); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -126,10 +120,7 @@ procedure main(); ensures (memory_load32_le(mem, $x_addr) == 6bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 4bv64)) == 4bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 0bv64)) == 1bv32); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/initialisation/gcc_pic/initialisation.expected b/src/test/correct/initialisation/gcc_pic/initialisation.expected index ef5a7eb85..5a5a277e7 100644 --- a/src/test/correct/initialisation/gcc_pic/initialisation.expected +++ b/src/test/correct/initialisation/gcc_pic/initialisation.expected @@ -12,7 +12,7 @@ const {:extern} $y_addr: bv64; axiom ($y_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -70,25 +70,22 @@ function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2084bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2085bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2086bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2087bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69652bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2084bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 68984bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 68992bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69576bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1940bv64); free ensures (memory_load64_le(mem, 69584bv64) == 69648bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69664bv64); - free ensures (memory_load64_le(mem, 68984bv64) == 1936bv64); - free ensures (memory_load64_le(mem, 68992bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1940bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { 
@@ -108,33 +105,27 @@ procedure main(); free requires (memory_load64_le(mem, 69648bv64) == 416611827717bv64); free requires (memory_load64_le(mem, 69656bv64) == 68719476735bv64); free requires (memory_load64_le(mem, 69664bv64) == 8589934593bv64); - free requires (memory_load8_le(mem, 2084bv64) == 1bv8); - free requires (memory_load8_le(mem, 2085bv64) == 0bv8); - free requires (memory_load8_le(mem, 2086bv64) == 2bv8); - free requires (memory_load8_le(mem, 2087bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69652bv64); + free requires (memory_load32_le(mem, 2084bv64) == 131073bv32); + free requires (memory_load64_le(mem, 68984bv64) == 1936bv64); + free requires (memory_load64_le(mem, 68992bv64) == 1856bv64); free requires (memory_load64_le(mem, 69576bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1940bv64); free requires (memory_load64_le(mem, 69584bv64) == 69648bv64); free requires (memory_load64_le(mem, 69600bv64) == 69664bv64); - free requires (memory_load64_le(mem, 68984bv64) == 1936bv64); - free requires (memory_load64_le(mem, 68992bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69652bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1940bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (memory_load32_le(mem, $x_addr) == 6bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 4bv64)) == 4bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 0bv64)) == 1bv32); - free ensures (memory_load8_le(mem, 2084bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2085bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2086bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2087bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 2084bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 68984bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 68992bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69576bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1940bv64); free ensures (memory_load64_le(mem, 69584bv64) == 69648bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69664bv64); - free ensures (memory_load64_le(mem, 68984bv64) == 1936bv64); - free ensures (memory_load64_le(mem, 68992bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1940bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/initialisation/gcc_pic/initialisation_gtirb.expected b/src/test/correct/initialisation/gcc_pic/initialisation_gtirb.expected index 0de8f82bf..bc4fb819f 100644 --- a/src/test/correct/initialisation/gcc_pic/initialisation_gtirb.expected +++ b/src/test/correct/initialisation/gcc_pic/initialisation_gtirb.expected @@ -12,7 +12,7 @@ const {:extern} $y_addr: bv64; axiom ($y_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -70,25 +70,22 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies 
Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2084bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2085bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2086bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2087bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69652bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2084bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 68984bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 68992bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69576bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1940bv64); free ensures (memory_load64_le(mem, 69584bv64) == 69648bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69664bv64); - free ensures (memory_load64_le(mem, 68984bv64) == 1936bv64); - free ensures (memory_load64_le(mem, 68992bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1940bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -108,33 +105,27 @@ procedure main(); free requires (memory_load64_le(mem, 69648bv64) == 416611827717bv64); free requires (memory_load64_le(mem, 69656bv64) == 68719476735bv64); free requires (memory_load64_le(mem, 69664bv64) == 8589934593bv64); - free requires (memory_load8_le(mem, 2084bv64) == 1bv8); - free requires (memory_load8_le(mem, 2085bv64) == 0bv8); - free requires (memory_load8_le(mem, 2086bv64) == 2bv8); - free requires (memory_load8_le(mem, 2087bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69652bv64); + free requires (memory_load32_le(mem, 2084bv64) == 131073bv32); + free requires (memory_load64_le(mem, 68984bv64) == 1936bv64); + free requires (memory_load64_le(mem, 68992bv64) == 1856bv64); free requires (memory_load64_le(mem, 69576bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1940bv64); free requires (memory_load64_le(mem, 69584bv64) == 69648bv64); free requires (memory_load64_le(mem, 69600bv64) == 69664bv64); - free requires (memory_load64_le(mem, 68984bv64) == 1936bv64); - free requires (memory_load64_le(mem, 68992bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69652bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1940bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (memory_load32_le(mem, $x_addr) == 6bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 4bv64)) == 4bv32); ensures (memory_load32_le(mem, bvadd64($a_addr, 0bv64)) == 1bv32); - free ensures (memory_load8_le(mem, 2084bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2085bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2086bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2087bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 2084bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 68984bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 68992bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69576bv64) == 69656bv64); - free ensures 
(memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1940bv64); free ensures (memory_load64_le(mem, 69584bv64) == 69648bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69664bv64); - free ensures (memory_load64_le(mem, 68984bv64) == 1936bv64); - free ensures (memory_load64_le(mem, 68992bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1940bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/jumptable2/clang/jumptable2_gtirb.expected b/src/test/correct/jumptable2/clang/jumptable2_gtirb.expected index 0ca38869d..aedfd59b2 100644 --- a/src/test/correct/jumptable2/clang/jumptable2_gtirb.expected +++ b/src/test/correct/jumptable2/clang/jumptable2_gtirb.expected @@ -30,7 +30,7 @@ const {:extern} $jump_table_addr: bv64; axiom ($jump_table_addr == 69688bv64); const {:extern} $x_addr: bv64; axiom ($x_addr == 69680bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -64,10 +64,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -81,24 +77,21 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2052bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2053bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2054bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2055bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69688bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69704bv64) == 1916bv64); - free ensures (memory_load64_le(mem, 69696bv64) == 1896bv64); - free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2052bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load64_le(mem, 69688bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69696bv64) == 1896bv64); + free ensures (memory_load64_le(mem, 69704bv64) == 1916bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -113,28 +106,22 @@ procedure {:extern} 
guarantee_reflexive(); procedure add_two(); modifies Gamma_R8, Gamma_R9, Gamma_mem, R8, R9, mem; - free requires (memory_load8_le(mem, 2052bv64) == 1bv8); - free requires (memory_load8_le(mem, 2053bv64) == 0bv8); - free requires (memory_load8_le(mem, 2054bv64) == 2bv8); - free requires (memory_load8_le(mem, 2055bv64) == 0bv8); - free requires (memory_load64_le(mem, 69688bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69704bv64) == 1916bv64); - free requires (memory_load64_le(mem, 69696bv64) == 1896bv64); - free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); + free requires (memory_load32_le(mem, 2052bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 1936bv64); - free ensures (memory_load8_le(mem, 2052bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2053bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2054bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2055bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69688bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69704bv64) == 1916bv64); - free ensures (memory_load64_le(mem, 69696bv64) == 1896bv64); - free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load64_le(mem, 69688bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69696bv64) == 1896bv64); + free requires (memory_load64_le(mem, 69704bv64) == 1916bv64); + free ensures (memory_load32_le(mem, 2052bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load64_le(mem, 69688bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69696bv64) == 1896bv64); + free ensures (memory_load64_le(mem, 69704bv64) == 1916bv64); implementation add_two() { @@ -156,28 +143,22 @@ implementation add_two() procedure sub_seven(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R8, Gamma_R9, Gamma_VF, Gamma_ZF, Gamma_mem, NF, R8, R9, VF, ZF, mem; - free requires (memory_load8_le(mem, 2052bv64) == 1bv8); - free requires (memory_load8_le(mem, 2053bv64) == 0bv8); - free requires (memory_load8_le(mem, 2054bv64) == 2bv8); - free requires (memory_load8_le(mem, 2055bv64) == 0bv8); - free requires (memory_load64_le(mem, 69688bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69704bv64) == 1916bv64); - free requires (memory_load64_le(mem, 69696bv64) == 1896bv64); - free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); + free requires (memory_load32_le(mem, 2052bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 1936bv64); - free ensures (memory_load8_le(mem, 2052bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2053bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2054bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2055bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69688bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 
69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69704bv64) == 1916bv64); - free ensures (memory_load64_le(mem, 69696bv64) == 1896bv64); - free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load64_le(mem, 69688bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69696bv64) == 1896bv64); + free requires (memory_load64_le(mem, 69704bv64) == 1916bv64); + free ensures (memory_load32_le(mem, 2052bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load64_le(mem, 69688bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69696bv64) == 1896bv64); + free ensures (memory_load64_le(mem, 69704bv64) == 1916bv64); implementation sub_seven() { @@ -206,28 +187,22 @@ implementation sub_seven() procedure add_six(); modifies Gamma_R8, Gamma_R9, Gamma_mem, R8, R9, mem; - free requires (memory_load8_le(mem, 2052bv64) == 1bv8); - free requires (memory_load8_le(mem, 2053bv64) == 0bv8); - free requires (memory_load8_le(mem, 2054bv64) == 2bv8); - free requires (memory_load8_le(mem, 2055bv64) == 0bv8); - free requires (memory_load64_le(mem, 69688bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69704bv64) == 1916bv64); - free requires (memory_load64_le(mem, 69696bv64) == 1896bv64); - free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); + free requires (memory_load32_le(mem, 2052bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 1936bv64); - free ensures (memory_load8_le(mem, 2052bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2053bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2054bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2055bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69688bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69704bv64) == 1916bv64); - free ensures (memory_load64_le(mem, 69696bv64) == 1896bv64); - free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load64_le(mem, 69688bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69696bv64) == 1896bv64); + free requires (memory_load64_le(mem, 69704bv64) == 1916bv64); + free ensures (memory_load32_le(mem, 2052bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load64_le(mem, 69688bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69696bv64) == 1896bv64); + free ensures (memory_load64_le(mem, 69704bv64) == 1916bv64); implementation add_six() { @@ -255,32 +230,26 @@ procedure main(); free requires (memory_load64_le(mem, 69688bv64) == 1876bv64); free requires (memory_load64_le(mem, 69696bv64) == 1896bv64); free requires (memory_load64_le(mem, 69704bv64) == 1916bv64); - free requires (memory_load8_le(mem, 2052bv64) == 1bv8); - free 
requires (memory_load8_le(mem, 2053bv64) == 0bv8); - free requires (memory_load8_le(mem, 2054bv64) == 2bv8); - free requires (memory_load8_le(mem, 2055bv64) == 0bv8); - free requires (memory_load64_le(mem, 69688bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69704bv64) == 1916bv64); - free requires (memory_load64_le(mem, 69696bv64) == 1896bv64); - free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); + free requires (memory_load32_le(mem, 2052bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 1936bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load64_le(mem, 69688bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69696bv64) == 1896bv64); + free requires (memory_load64_le(mem, 69704bv64) == 1916bv64); free ensures (Gamma_R29 == old(Gamma_R29)); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 2052bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2053bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2054bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2055bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69688bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69704bv64) == 1916bv64); - free ensures (memory_load64_le(mem, 69696bv64) == 1896bv64); - free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); + free ensures (memory_load32_le(mem, 2052bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load64_le(mem, 69688bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69696bv64) == 1896bv64); + free ensures (memory_load64_le(mem, 69704bv64) == 1916bv64); implementation main() { diff --git a/src/test/correct/jumptable2/clang_O2/jumptable2_gtirb.expected b/src/test/correct/jumptable2/clang_O2/jumptable2_gtirb.expected index dc88e8297..37536bc1f 100644 --- a/src/test/correct/jumptable2/clang_O2/jumptable2_gtirb.expected +++ b/src/test/correct/jumptable2/clang_O2/jumptable2_gtirb.expected @@ -22,7 +22,7 @@ const {:extern} $jump_table_addr: bv64; axiom ($jump_table_addr == 69688bv64); const {:extern} $x_addr: bv64; axiom ($x_addr == 69680bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -52,10 +52,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] 
} @@ -67,24 +63,21 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2016bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2017bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2018bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2019bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69688bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69704bv64) == 1916bv64); - free ensures (memory_load64_le(mem, 69696bv64) == 1896bv64); - free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2016bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load64_le(mem, 69688bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69696bv64) == 1896bv64); + free ensures (memory_load64_le(mem, 69704bv64) == 1916bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -105,34 +98,28 @@ procedure main(); free requires (memory_load64_le(mem, 69688bv64) == 1876bv64); free requires (memory_load64_le(mem, 69696bv64) == 1896bv64); free requires (memory_load64_le(mem, 69704bv64) == 1916bv64); - free requires (memory_load8_le(mem, 2016bv64) == 1bv8); - free requires (memory_load8_le(mem, 2017bv64) == 0bv8); - free requires (memory_load8_le(mem, 2018bv64) == 2bv8); - free requires (memory_load8_le(mem, 2019bv64) == 0bv8); - free requires (memory_load64_le(mem, 69688bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69704bv64) == 1916bv64); - free requires (memory_load64_le(mem, 69696bv64) == 1896bv64); - free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); + free requires (memory_load32_le(mem, 2016bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 1936bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load64_le(mem, 69688bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69696bv64) == 1896bv64); + free requires (memory_load64_le(mem, 69704bv64) == 1916bv64); free ensures (Gamma_R19 == old(Gamma_R19)); free ensures (Gamma_R29 == old(Gamma_R29)); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R19 == old(R19)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 2016bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2017bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2018bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2019bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69688bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69704bv64) == 1916bv64); - free ensures 
(memory_load64_le(mem, 69696bv64) == 1896bv64); - free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); + free ensures (memory_load32_le(mem, 2016bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load64_le(mem, 69688bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69696bv64) == 1896bv64); + free ensures (memory_load64_le(mem, 69704bv64) == 1916bv64); implementation main() { @@ -185,28 +172,22 @@ implementation main() procedure sub_seven(); modifies Gamma_R8, Gamma_R9, Gamma_mem, R8, R9, mem; - free requires (memory_load8_le(mem, 2016bv64) == 1bv8); - free requires (memory_load8_le(mem, 2017bv64) == 0bv8); - free requires (memory_load8_le(mem, 2018bv64) == 2bv8); - free requires (memory_load8_le(mem, 2019bv64) == 0bv8); - free requires (memory_load64_le(mem, 69688bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69704bv64) == 1916bv64); - free requires (memory_load64_le(mem, 69696bv64) == 1896bv64); - free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); + free requires (memory_load32_le(mem, 2016bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 1936bv64); - free ensures (memory_load8_le(mem, 2016bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2017bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2018bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2019bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69688bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69704bv64) == 1916bv64); - free ensures (memory_load64_le(mem, 69696bv64) == 1896bv64); - free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load64_le(mem, 69688bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69696bv64) == 1896bv64); + free requires (memory_load64_le(mem, 69704bv64) == 1916bv64); + free ensures (memory_load32_le(mem, 2016bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load64_le(mem, 69688bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69696bv64) == 1896bv64); + free ensures (memory_load64_le(mem, 69704bv64) == 1916bv64); implementation sub_seven() { @@ -228,28 +209,22 @@ implementation sub_seven() procedure add_two(); modifies Gamma_R8, Gamma_R9, Gamma_mem, R8, R9, mem; - free requires (memory_load8_le(mem, 2016bv64) == 1bv8); - free requires (memory_load8_le(mem, 2017bv64) == 0bv8); - free requires (memory_load8_le(mem, 2018bv64) == 2bv8); - free requires (memory_load8_le(mem, 2019bv64) == 0bv8); - free requires (memory_load64_le(mem, 69688bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69704bv64) == 1916bv64); - free requires (memory_load64_le(mem, 69696bv64) == 1896bv64); - free requires (memory_load64_le(mem, 
69072bv64) == 1792bv64); + free requires (memory_load32_le(mem, 2016bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 1936bv64); - free ensures (memory_load8_le(mem, 2016bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2017bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2018bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2019bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69688bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69704bv64) == 1916bv64); - free ensures (memory_load64_le(mem, 69696bv64) == 1896bv64); - free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load64_le(mem, 69688bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69696bv64) == 1896bv64); + free requires (memory_load64_le(mem, 69704bv64) == 1916bv64); + free ensures (memory_load32_le(mem, 2016bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load64_le(mem, 69688bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69696bv64) == 1896bv64); + free ensures (memory_load64_le(mem, 69704bv64) == 1916bv64); implementation add_two() { @@ -271,28 +246,22 @@ implementation add_two() procedure add_six(); modifies Gamma_R8, Gamma_R9, Gamma_mem, R8, R9, mem; - free requires (memory_load8_le(mem, 2016bv64) == 1bv8); - free requires (memory_load8_le(mem, 2017bv64) == 0bv8); - free requires (memory_load8_le(mem, 2018bv64) == 2bv8); - free requires (memory_load8_le(mem, 2019bv64) == 0bv8); - free requires (memory_load64_le(mem, 69688bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69704bv64) == 1916bv64); - free requires (memory_load64_le(mem, 69696bv64) == 1896bv64); - free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); + free requires (memory_load32_le(mem, 2016bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 1936bv64); - free ensures (memory_load8_le(mem, 2016bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2017bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2018bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2019bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69688bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69704bv64) == 1916bv64); - free ensures (memory_load64_le(mem, 69696bv64) == 1896bv64); - free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load64_le(mem, 69688bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69696bv64) == 1896bv64); + free requires (memory_load64_le(mem, 69704bv64) == 1916bv64); + free ensures (memory_load32_le(mem, 2016bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures 
(memory_load64_le(mem, 69592bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load64_le(mem, 69688bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69696bv64) == 1896bv64); + free ensures (memory_load64_le(mem, 69704bv64) == 1916bv64); implementation add_six() { diff --git a/src/test/correct/jumptable2/clang_pic/jumptable2_gtirb.expected b/src/test/correct/jumptable2/clang_pic/jumptable2_gtirb.expected index 1cebf2f2d..829cdbe50 100644 --- a/src/test/correct/jumptable2/clang_pic/jumptable2_gtirb.expected +++ b/src/test/correct/jumptable2/clang_pic/jumptable2_gtirb.expected @@ -30,7 +30,7 @@ const {:extern} $jump_table_addr: bv64; axiom ($jump_table_addr == 69688bv64); const {:extern} $x_addr: bv64; axiom ($x_addr == 69680bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -64,10 +64,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -81,26 +77,23 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2124bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2125bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2126bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2127bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69584bv64) == 2012bv64); - free ensures (memory_load64_le(mem, 69688bv64) == 1940bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2124bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1936bv64); - free ensures (memory_load64_le(mem, 69568bv64) == 69680bv64); free ensures (memory_load64_le(mem, 69056bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69568bv64) == 69680bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 2012bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69704bv64) == 1988bv64); + free ensures (memory_load64_le(mem, 69688bv64) == 1940bv64); free ensures (memory_load64_le(mem, 69696bv64) == 1964bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69704bv64) == 1988bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -115,32 +108,26 @@ procedure {:extern} guarantee_reflexive(); procedure add_two(); modifies Gamma_R8, Gamma_R9, Gamma_mem, R8, R9, mem; - free requires 
(memory_load8_le(mem, 2124bv64) == 1bv8); - free requires (memory_load8_le(mem, 2125bv64) == 0bv8); - free requires (memory_load8_le(mem, 2126bv64) == 2bv8); - free requires (memory_load8_le(mem, 2127bv64) == 0bv8); - free requires (memory_load64_le(mem, 69584bv64) == 2012bv64); - free requires (memory_load64_le(mem, 69688bv64) == 1940bv64); + free requires (memory_load32_le(mem, 2124bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1936bv64); - free requires (memory_load64_le(mem, 69568bv64) == 69680bv64); free requires (memory_load64_le(mem, 69056bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69568bv64) == 69680bv64); + free requires (memory_load64_le(mem, 69584bv64) == 2012bv64); + free requires (memory_load64_le(mem, 69600bv64) == 69688bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69704bv64) == 1988bv64); + free requires (memory_load64_le(mem, 69688bv64) == 1940bv64); free requires (memory_load64_le(mem, 69696bv64) == 1964bv64); - free requires (memory_load64_le(mem, 69600bv64) == 69688bv64); - free ensures (memory_load8_le(mem, 2124bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2125bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2126bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2127bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69584bv64) == 2012bv64); - free ensures (memory_load64_le(mem, 69688bv64) == 1940bv64); + free requires (memory_load64_le(mem, 69704bv64) == 1988bv64); + free ensures (memory_load32_le(mem, 2124bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1936bv64); - free ensures (memory_load64_le(mem, 69568bv64) == 69680bv64); free ensures (memory_load64_le(mem, 69056bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69568bv64) == 69680bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 2012bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69704bv64) == 1988bv64); + free ensures (memory_load64_le(mem, 69688bv64) == 1940bv64); free ensures (memory_load64_le(mem, 69696bv64) == 1964bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69704bv64) == 1988bv64); implementation add_two() { @@ -164,32 +151,26 @@ implementation add_two() procedure add_six(); modifies Gamma_R8, Gamma_R9, Gamma_mem, R8, R9, mem; - free requires (memory_load8_le(mem, 2124bv64) == 1bv8); - free requires (memory_load8_le(mem, 2125bv64) == 0bv8); - free requires (memory_load8_le(mem, 2126bv64) == 2bv8); - free requires (memory_load8_le(mem, 2127bv64) == 0bv8); - free requires (memory_load64_le(mem, 69584bv64) == 2012bv64); - free requires (memory_load64_le(mem, 69688bv64) == 1940bv64); + free requires (memory_load32_le(mem, 2124bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1936bv64); - free requires (memory_load64_le(mem, 69568bv64) == 69680bv64); free requires (memory_load64_le(mem, 69056bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69568bv64) == 69680bv64); + free requires (memory_load64_le(mem, 69584bv64) == 2012bv64); + free requires (memory_load64_le(mem, 69600bv64) == 69688bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69704bv64) == 1988bv64); + free requires (memory_load64_le(mem, 69688bv64) == 1940bv64); free requires (memory_load64_le(mem, 69696bv64) == 1964bv64); 
- free requires (memory_load64_le(mem, 69600bv64) == 69688bv64); - free ensures (memory_load8_le(mem, 2124bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2125bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2126bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2127bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69584bv64) == 2012bv64); - free ensures (memory_load64_le(mem, 69688bv64) == 1940bv64); + free requires (memory_load64_le(mem, 69704bv64) == 1988bv64); + free ensures (memory_load32_le(mem, 2124bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1936bv64); - free ensures (memory_load64_le(mem, 69568bv64) == 69680bv64); free ensures (memory_load64_le(mem, 69056bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69568bv64) == 69680bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 2012bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69704bv64) == 1988bv64); + free ensures (memory_load64_le(mem, 69688bv64) == 1940bv64); free ensures (memory_load64_le(mem, 69696bv64) == 1964bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69704bv64) == 1988bv64); implementation add_six() { @@ -219,36 +200,30 @@ procedure main(); free requires (memory_load64_le(mem, 69688bv64) == 1940bv64); free requires (memory_load64_le(mem, 69696bv64) == 1964bv64); free requires (memory_load64_le(mem, 69704bv64) == 1988bv64); - free requires (memory_load8_le(mem, 2124bv64) == 1bv8); - free requires (memory_load8_le(mem, 2125bv64) == 0bv8); - free requires (memory_load8_le(mem, 2126bv64) == 2bv8); - free requires (memory_load8_le(mem, 2127bv64) == 0bv8); - free requires (memory_load64_le(mem, 69584bv64) == 2012bv64); - free requires (memory_load64_le(mem, 69688bv64) == 1940bv64); + free requires (memory_load32_le(mem, 2124bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1936bv64); - free requires (memory_load64_le(mem, 69568bv64) == 69680bv64); free requires (memory_load64_le(mem, 69056bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69568bv64) == 69680bv64); + free requires (memory_load64_le(mem, 69584bv64) == 2012bv64); + free requires (memory_load64_le(mem, 69600bv64) == 69688bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69704bv64) == 1988bv64); + free requires (memory_load64_le(mem, 69688bv64) == 1940bv64); free requires (memory_load64_le(mem, 69696bv64) == 1964bv64); - free requires (memory_load64_le(mem, 69600bv64) == 69688bv64); + free requires (memory_load64_le(mem, 69704bv64) == 1988bv64); free ensures (Gamma_R29 == old(Gamma_R29)); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 2124bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2125bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2126bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2127bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69584bv64) == 2012bv64); - free ensures (memory_load64_le(mem, 69688bv64) == 1940bv64); + free ensures (memory_load32_le(mem, 2124bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1936bv64); - free ensures (memory_load64_le(mem, 69568bv64) == 69680bv64); free ensures (memory_load64_le(mem, 69056bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69568bv64) == 
69680bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 2012bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69704bv64) == 1988bv64); + free ensures (memory_load64_le(mem, 69688bv64) == 1940bv64); free ensures (memory_load64_le(mem, 69696bv64) == 1964bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69704bv64) == 1988bv64); implementation main() { @@ -315,32 +290,26 @@ implementation main() procedure sub_seven(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R8, Gamma_R9, Gamma_VF, Gamma_ZF, Gamma_mem, NF, R8, R9, VF, ZF, mem; - free requires (memory_load8_le(mem, 2124bv64) == 1bv8); - free requires (memory_load8_le(mem, 2125bv64) == 0bv8); - free requires (memory_load8_le(mem, 2126bv64) == 2bv8); - free requires (memory_load8_le(mem, 2127bv64) == 0bv8); - free requires (memory_load64_le(mem, 69584bv64) == 2012bv64); - free requires (memory_load64_le(mem, 69688bv64) == 1940bv64); + free requires (memory_load32_le(mem, 2124bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1936bv64); - free requires (memory_load64_le(mem, 69568bv64) == 69680bv64); free requires (memory_load64_le(mem, 69056bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69568bv64) == 69680bv64); + free requires (memory_load64_le(mem, 69584bv64) == 2012bv64); + free requires (memory_load64_le(mem, 69600bv64) == 69688bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69704bv64) == 1988bv64); + free requires (memory_load64_le(mem, 69688bv64) == 1940bv64); free requires (memory_load64_le(mem, 69696bv64) == 1964bv64); - free requires (memory_load64_le(mem, 69600bv64) == 69688bv64); - free ensures (memory_load8_le(mem, 2124bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2125bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2126bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2127bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69584bv64) == 2012bv64); - free ensures (memory_load64_le(mem, 69688bv64) == 1940bv64); + free requires (memory_load64_le(mem, 69704bv64) == 1988bv64); + free ensures (memory_load32_le(mem, 2124bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1936bv64); - free ensures (memory_load64_le(mem, 69568bv64) == 69680bv64); free ensures (memory_load64_le(mem, 69056bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69568bv64) == 69680bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 2012bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69704bv64) == 1988bv64); + free ensures (memory_load64_le(mem, 69688bv64) == 1940bv64); free ensures (memory_load64_le(mem, 69696bv64) == 1964bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69704bv64) == 1988bv64); implementation sub_seven() { diff --git a/src/test/correct/jumptable2/gcc/jumptable2_gtirb.expected b/src/test/correct/jumptable2/gcc/jumptable2_gtirb.expected index 37f9a52df..9c87ecd7c 100644 --- a/src/test/correct/jumptable2/gcc/jumptable2_gtirb.expected +++ b/src/test/correct/jumptable2/gcc/jumptable2_gtirb.expected @@ -18,7 +18,7 @@ const {:extern} $jump_table_addr: bv64; axiom ($jump_table_addr == 69656bv64); const {:extern} $x_addr: bv64; axiom ($x_addr == 
69648bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -48,10 +48,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -63,24 +59,21 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2080bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2081bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2082bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2083bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2080bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1984bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69656bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69664bv64) == 1912bv64); - free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69672bv64) == 1948bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -95,28 +88,22 @@ procedure {:extern} guarantee_reflexive(); procedure add_two(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; - free requires (memory_load8_le(mem, 2080bv64) == 1bv8); - free requires (memory_load8_le(mem, 2081bv64) == 0bv8); - free requires (memory_load8_le(mem, 2082bv64) == 2bv8); - free requires (memory_load8_le(mem, 2083bv64) == 0bv8); + free requires (memory_load32_le(mem, 2080bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1984bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free requires (memory_load64_le(mem, 69656bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69664bv64) == 1912bv64); - free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69672bv64) == 1948bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 2080bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2081bv64) == 0bv8); - free ensures 
(memory_load8_le(mem, 2082bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2083bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2080bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1984bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69656bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69664bv64) == 1912bv64); - free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69672bv64) == 1948bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation add_two() { @@ -141,28 +128,22 @@ implementation add_two() procedure sub_seven(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; - free requires (memory_load8_le(mem, 2080bv64) == 1bv8); - free requires (memory_load8_le(mem, 2081bv64) == 0bv8); - free requires (memory_load8_le(mem, 2082bv64) == 2bv8); - free requires (memory_load8_le(mem, 2083bv64) == 0bv8); + free requires (memory_load32_le(mem, 2080bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1984bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free requires (memory_load64_le(mem, 69656bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69664bv64) == 1912bv64); - free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69672bv64) == 1948bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 2080bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2081bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2082bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2083bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2080bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1984bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69656bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69664bv64) == 1912bv64); - free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69672bv64) == 1948bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation sub_seven() { @@ -187,28 +168,22 @@ implementation sub_seven() procedure add_six(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; - free requires (memory_load8_le(mem, 2080bv64) == 1bv8); - free requires (memory_load8_le(mem, 2081bv64) == 0bv8); - free requires (memory_load8_le(mem, 2082bv64) == 2bv8); - free requires (memory_load8_le(mem, 2083bv64) == 0bv8); + free requires (memory_load32_le(mem, 2080bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1984bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free requires (memory_load64_le(mem, 69656bv64) == 1876bv64); - 
free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69664bv64) == 1912bv64); - free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69672bv64) == 1948bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 2080bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2081bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2082bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2083bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2080bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1984bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69656bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69664bv64) == 1912bv64); - free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69672bv64) == 1948bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation add_six() { @@ -239,32 +214,26 @@ procedure main(); free requires (memory_load64_le(mem, 69656bv64) == 1876bv64); free requires (memory_load64_le(mem, 69664bv64) == 1912bv64); free requires (memory_load64_le(mem, 69672bv64) == 1948bv64); - free requires (memory_load8_le(mem, 2080bv64) == 1bv8); - free requires (memory_load8_le(mem, 2081bv64) == 0bv8); - free requires (memory_load8_le(mem, 2082bv64) == 2bv8); - free requires (memory_load8_le(mem, 2083bv64) == 0bv8); + free requires (memory_load32_le(mem, 2080bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1984bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free requires (memory_load64_le(mem, 69656bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69664bv64) == 1912bv64); - free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69672bv64) == 1948bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R29 == old(Gamma_R29)); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 2080bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2081bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2082bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2083bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2080bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1984bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69656bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69664bv64) == 1912bv64); - free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69672bv64) == 1948bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git 
a/src/test/correct/jumptable2/gcc_O2/jumptable2_gtirb.expected b/src/test/correct/jumptable2/gcc_O2/jumptable2_gtirb.expected index 5c232874f..f0ab465db 100644 --- a/src/test/correct/jumptable2/gcc_O2/jumptable2_gtirb.expected +++ b/src/test/correct/jumptable2/gcc_O2/jumptable2_gtirb.expected @@ -20,7 +20,7 @@ const {:extern} $jump_table_addr: bv64; axiom ($jump_table_addr == 69656bv64); const {:extern} $x_addr: bv64; axiom ($x_addr == 69648bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -50,10 +50,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -65,24 +61,21 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2040bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2041bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2042bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2043bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2040bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69016bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69024bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1600bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69656bv64) == 1952bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69664bv64) == 1972bv64); - free ensures (memory_load64_le(mem, 69024bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69672bv64) == 2000bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -97,28 +90,22 @@ procedure {:extern} guarantee_reflexive(); procedure add_two(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; - free requires (memory_load8_le(mem, 2040bv64) == 1bv8); - free requires (memory_load8_le(mem, 2041bv64) == 0bv8); - free requires (memory_load8_le(mem, 2042bv64) == 2bv8); - free requires (memory_load8_le(mem, 2043bv64) == 0bv8); + free requires (memory_load32_le(mem, 2040bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69016bv64) == 1936bv64); + free requires (memory_load64_le(mem, 69024bv64) == 1856bv64); free requires (memory_load64_le(mem, 69616bv64) == 1600bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free requires (memory_load64_le(mem, 69656bv64) == 1952bv64); - free requires 
(memory_load64_le(mem, 69016bv64) == 1936bv64); free requires (memory_load64_le(mem, 69664bv64) == 1972bv64); - free requires (memory_load64_le(mem, 69024bv64) == 1856bv64); free requires (memory_load64_le(mem, 69672bv64) == 2000bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 2040bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2041bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2042bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2043bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2040bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69016bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69024bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1600bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69656bv64) == 1952bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69664bv64) == 1972bv64); - free ensures (memory_load64_le(mem, 69024bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69672bv64) == 2000bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation add_two() { @@ -146,34 +133,28 @@ procedure main(); free requires (memory_load64_le(mem, 69656bv64) == 1952bv64); free requires (memory_load64_le(mem, 69664bv64) == 1972bv64); free requires (memory_load64_le(mem, 69672bv64) == 2000bv64); - free requires (memory_load8_le(mem, 2040bv64) == 1bv8); - free requires (memory_load8_le(mem, 2041bv64) == 0bv8); - free requires (memory_load8_le(mem, 2042bv64) == 2bv8); - free requires (memory_load8_le(mem, 2043bv64) == 0bv8); + free requires (memory_load32_le(mem, 2040bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69016bv64) == 1936bv64); + free requires (memory_load64_le(mem, 69024bv64) == 1856bv64); free requires (memory_load64_le(mem, 69616bv64) == 1600bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free requires (memory_load64_le(mem, 69656bv64) == 1952bv64); - free requires (memory_load64_le(mem, 69016bv64) == 1936bv64); free requires (memory_load64_le(mem, 69664bv64) == 1972bv64); - free requires (memory_load64_le(mem, 69024bv64) == 1856bv64); free requires (memory_load64_le(mem, 69672bv64) == 2000bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R19 == old(Gamma_R19)); free ensures (Gamma_R29 == old(Gamma_R29)); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R19 == old(R19)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 2040bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2041bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2042bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2043bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2040bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69016bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69024bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1600bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69656bv64) == 1952bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69664bv64) == 1972bv64); - free ensures (memory_load64_le(mem, 69024bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69672bv64) == 2000bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); 
implementation main() { @@ -226,28 +207,22 @@ implementation main() procedure sub_seven(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; - free requires (memory_load8_le(mem, 2040bv64) == 1bv8); - free requires (memory_load8_le(mem, 2041bv64) == 0bv8); - free requires (memory_load8_le(mem, 2042bv64) == 2bv8); - free requires (memory_load8_le(mem, 2043bv64) == 0bv8); + free requires (memory_load32_le(mem, 2040bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69016bv64) == 1936bv64); + free requires (memory_load64_le(mem, 69024bv64) == 1856bv64); free requires (memory_load64_le(mem, 69616bv64) == 1600bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free requires (memory_load64_le(mem, 69656bv64) == 1952bv64); - free requires (memory_load64_le(mem, 69016bv64) == 1936bv64); free requires (memory_load64_le(mem, 69664bv64) == 1972bv64); - free requires (memory_load64_le(mem, 69024bv64) == 1856bv64); free requires (memory_load64_le(mem, 69672bv64) == 2000bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 2040bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2041bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2042bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2043bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2040bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69016bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69024bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1600bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69656bv64) == 1952bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69664bv64) == 1972bv64); - free ensures (memory_load64_le(mem, 69024bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69672bv64) == 2000bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation sub_seven() { @@ -269,28 +244,22 @@ implementation sub_seven() procedure add_six(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; - free requires (memory_load8_le(mem, 2040bv64) == 1bv8); - free requires (memory_load8_le(mem, 2041bv64) == 0bv8); - free requires (memory_load8_le(mem, 2042bv64) == 2bv8); - free requires (memory_load8_le(mem, 2043bv64) == 0bv8); + free requires (memory_load32_le(mem, 2040bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69016bv64) == 1936bv64); + free requires (memory_load64_le(mem, 69024bv64) == 1856bv64); free requires (memory_load64_le(mem, 69616bv64) == 1600bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free requires (memory_load64_le(mem, 69656bv64) == 1952bv64); - free requires (memory_load64_le(mem, 69016bv64) == 1936bv64); free requires (memory_load64_le(mem, 69664bv64) == 1972bv64); - free requires (memory_load64_le(mem, 69024bv64) == 1856bv64); free requires (memory_load64_le(mem, 69672bv64) == 2000bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 2040bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2041bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2042bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2043bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2040bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69016bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69024bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1600bv64); + 
free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69656bv64) == 1952bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69664bv64) == 1972bv64); - free ensures (memory_load64_le(mem, 69024bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69672bv64) == 2000bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation add_six() { diff --git a/src/test/correct/jumptable2/gcc_pic/jumptable2_gtirb.expected b/src/test/correct/jumptable2/gcc_pic/jumptable2_gtirb.expected index 9ff9781fc..b5833af5e 100644 --- a/src/test/correct/jumptable2/gcc_pic/jumptable2_gtirb.expected +++ b/src/test/correct/jumptable2/gcc_pic/jumptable2_gtirb.expected @@ -18,7 +18,7 @@ const {:extern} $jump_table_addr: bv64; axiom ($jump_table_addr == 69656bv64); const {:extern} $x_addr: bv64; axiom ($x_addr == 69648bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -48,10 +48,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -63,26 +59,23 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2144bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2145bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2146bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2147bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2144bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69008bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69592bv64) == 69648bv64); free ensures (memory_load64_le(mem, 69608bv64) == 2048bv64); + free ensures (memory_load64_le(mem, 69624bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69656bv64) == 1940bv64); free ensures (memory_load64_le(mem, 69664bv64) == 1976bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1856bv64); - free ensures (memory_load64_le(mem, 69624bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69592bv64) == 69648bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69672bv64) == 2012bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -97,32 +90,26 @@ procedure {:extern} 
guarantee_reflexive(); procedure add_two(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; - free requires (memory_load8_le(mem, 2144bv64) == 1bv8); - free requires (memory_load8_le(mem, 2145bv64) == 0bv8); - free requires (memory_load8_le(mem, 2146bv64) == 2bv8); - free requires (memory_load8_le(mem, 2147bv64) == 0bv8); + free requires (memory_load32_le(mem, 2144bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1936bv64); + free requires (memory_load64_le(mem, 69008bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69592bv64) == 69648bv64); free requires (memory_load64_le(mem, 69608bv64) == 2048bv64); + free requires (memory_load64_le(mem, 69624bv64) == 69656bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free requires (memory_load64_le(mem, 69656bv64) == 1940bv64); free requires (memory_load64_le(mem, 69664bv64) == 1976bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1856bv64); - free requires (memory_load64_le(mem, 69624bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69592bv64) == 69648bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1936bv64); free requires (memory_load64_le(mem, 69672bv64) == 2012bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 2144bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2145bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2146bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2147bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2144bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69008bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69592bv64) == 69648bv64); free ensures (memory_load64_le(mem, 69608bv64) == 2048bv64); + free ensures (memory_load64_le(mem, 69624bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69656bv64) == 1940bv64); free ensures (memory_load64_le(mem, 69664bv64) == 1976bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1856bv64); - free ensures (memory_load64_le(mem, 69624bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69592bv64) == 69648bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69672bv64) == 2012bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation add_two() { @@ -149,32 +136,26 @@ implementation add_two() procedure sub_seven(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; - free requires (memory_load8_le(mem, 2144bv64) == 1bv8); - free requires (memory_load8_le(mem, 2145bv64) == 0bv8); - free requires (memory_load8_le(mem, 2146bv64) == 2bv8); - free requires (memory_load8_le(mem, 2147bv64) == 0bv8); + free requires (memory_load32_le(mem, 2144bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1936bv64); + free requires (memory_load64_le(mem, 69008bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69592bv64) == 69648bv64); free requires (memory_load64_le(mem, 69608bv64) == 2048bv64); + free requires (memory_load64_le(mem, 69624bv64) == 69656bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free requires (memory_load64_le(mem, 69656bv64) == 1940bv64); free requires (memory_load64_le(mem, 69664bv64) == 1976bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1856bv64); - free requires (memory_load64_le(mem, 69624bv64) == 69656bv64); - free 
requires (memory_load64_le(mem, 69592bv64) == 69648bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1936bv64); free requires (memory_load64_le(mem, 69672bv64) == 2012bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 2144bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2145bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2146bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2147bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2144bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69008bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69592bv64) == 69648bv64); free ensures (memory_load64_le(mem, 69608bv64) == 2048bv64); + free ensures (memory_load64_le(mem, 69624bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69656bv64) == 1940bv64); free ensures (memory_load64_le(mem, 69664bv64) == 1976bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1856bv64); - free ensures (memory_load64_le(mem, 69624bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69592bv64) == 69648bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69672bv64) == 2012bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation sub_seven() { @@ -207,36 +188,30 @@ procedure main(); free requires (memory_load64_le(mem, 69656bv64) == 1940bv64); free requires (memory_load64_le(mem, 69664bv64) == 1976bv64); free requires (memory_load64_le(mem, 69672bv64) == 2012bv64); - free requires (memory_load8_le(mem, 2144bv64) == 1bv8); - free requires (memory_load8_le(mem, 2145bv64) == 0bv8); - free requires (memory_load8_le(mem, 2146bv64) == 2bv8); - free requires (memory_load8_le(mem, 2147bv64) == 0bv8); + free requires (memory_load32_le(mem, 2144bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1936bv64); + free requires (memory_load64_le(mem, 69008bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69592bv64) == 69648bv64); free requires (memory_load64_le(mem, 69608bv64) == 2048bv64); + free requires (memory_load64_le(mem, 69624bv64) == 69656bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free requires (memory_load64_le(mem, 69656bv64) == 1940bv64); free requires (memory_load64_le(mem, 69664bv64) == 1976bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1856bv64); - free requires (memory_load64_le(mem, 69624bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69592bv64) == 69648bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1936bv64); free requires (memory_load64_le(mem, 69672bv64) == 2012bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R29 == old(Gamma_R29)); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 2144bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2145bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2146bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2147bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2144bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69008bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69592bv64) == 69648bv64); free ensures (memory_load64_le(mem, 69608bv64) == 
2048bv64); + free ensures (memory_load64_le(mem, 69624bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69656bv64) == 1940bv64); free ensures (memory_load64_le(mem, 69664bv64) == 1976bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1856bv64); - free ensures (memory_load64_le(mem, 69624bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69592bv64) == 69648bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69672bv64) == 2012bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { @@ -297,32 +272,26 @@ implementation main() procedure add_six(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; - free requires (memory_load8_le(mem, 2144bv64) == 1bv8); - free requires (memory_load8_le(mem, 2145bv64) == 0bv8); - free requires (memory_load8_le(mem, 2146bv64) == 2bv8); - free requires (memory_load8_le(mem, 2147bv64) == 0bv8); + free requires (memory_load32_le(mem, 2144bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1936bv64); + free requires (memory_load64_le(mem, 69008bv64) == 1856bv64); + free requires (memory_load64_le(mem, 69592bv64) == 69648bv64); free requires (memory_load64_le(mem, 69608bv64) == 2048bv64); + free requires (memory_load64_le(mem, 69624bv64) == 69656bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free requires (memory_load64_le(mem, 69656bv64) == 1940bv64); free requires (memory_load64_le(mem, 69664bv64) == 1976bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1856bv64); - free requires (memory_load64_le(mem, 69624bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69592bv64) == 69648bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1936bv64); free requires (memory_load64_le(mem, 69672bv64) == 2012bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 2144bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2145bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2146bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2147bv64) == 0bv8); + free ensures (memory_load32_le(mem, 2144bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1936bv64); + free ensures (memory_load64_le(mem, 69008bv64) == 1856bv64); + free ensures (memory_load64_le(mem, 69592bv64) == 69648bv64); free ensures (memory_load64_le(mem, 69608bv64) == 2048bv64); + free ensures (memory_load64_le(mem, 69624bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69656bv64) == 1940bv64); free ensures (memory_load64_le(mem, 69664bv64) == 1976bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1856bv64); - free ensures (memory_load64_le(mem, 69624bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69592bv64) == 69648bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69672bv64) == 2012bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation add_six() { diff --git a/src/test/correct/malloc_with_local/clang/malloc_with_local.expected b/src/test/correct/malloc_with_local/clang/malloc_with_local.expected index d744c417a..d54c96082 100644 --- a/src/test/correct/malloc_with_local/clang/malloc_with_local.expected +++ b/src/test/correct/malloc_with_local/clang/malloc_with_local.expected @@ -22,7 +22,7 @@ var {:extern} mem: [bv64]bv8; 
var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2256bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -79,8 +79,8 @@ function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2256bv64) == 2334386691848142849bv64); free ensures (memory_load64_le(mem, 2264bv64) == 4211825664600402019bv64); free ensures (memory_load64_le(mem, 2272bv64) == 7307182754559632672bv64); @@ -96,8 +96,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/malloc_with_local/clang/malloc_with_local_gtirb.expected b/src/test/correct/malloc_with_local/clang/malloc_with_local_gtirb.expected index 35a8659c0..6793f6257 100644 --- a/src/test/correct/malloc_with_local/clang/malloc_with_local_gtirb.expected +++ b/src/test/correct/malloc_with_local/clang/malloc_with_local_gtirb.expected @@ -22,7 +22,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2256bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -79,8 +79,8 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2256bv64) == 2334386691848142849bv64); free ensures (memory_load64_le(mem, 2264bv64) == 4211825664600402019bv64); free ensures (memory_load64_le(mem, 2272bv64) == 7307182754559632672bv64); @@ -96,8 +96,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/malloc_with_local/clang_O2/malloc_with_local.expected b/src/test/correct/malloc_with_local/clang_O2/malloc_with_local.expected index e967cbe24..ac16f7d10 100644 --- a/src/test/correct/malloc_with_local/clang_O2/malloc_with_local.expected +++ b/src/test/correct/malloc_with_local/clang_O2/malloc_with_local.expected @@ -41,8 +41,8 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); @@ -65,8 +65,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation 
{:extern} rely_transitive() { diff --git a/src/test/correct/malloc_with_local/clang_O2/malloc_with_local_gtirb.expected b/src/test/correct/malloc_with_local/clang_O2/malloc_with_local_gtirb.expected index 69e194091..1f4ef0422 100644 --- a/src/test/correct/malloc_with_local/clang_O2/malloc_with_local_gtirb.expected +++ b/src/test/correct/malloc_with_local/clang_O2/malloc_with_local_gtirb.expected @@ -18,7 +18,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 1964bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -45,8 +45,8 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); @@ -69,8 +69,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/malloc_with_local/gcc/malloc_with_local.expected b/src/test/correct/malloc_with_local/gcc/malloc_with_local.expected index 86442b2ef..a85bcc599 100644 --- a/src/test/correct/malloc_with_local/gcc/malloc_with_local.expected +++ b/src/test/correct/malloc_with_local/gcc/malloc_with_local.expected @@ -18,7 +18,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2248bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -75,8 +75,8 @@ function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2248bv64) == 131073bv64); free ensures (memory_load64_le(mem, 2256bv64) == 8241983568019286100bv64); free ensures (memory_load64_le(mem, 2264bv64) == 748482783423457568bv64); @@ -96,8 +96,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/malloc_with_local/gcc/malloc_with_local_gtirb.expected b/src/test/correct/malloc_with_local/gcc/malloc_with_local_gtirb.expected index 9c82490bb..b0dce8dca 100644 --- a/src/test/correct/malloc_with_local/gcc/malloc_with_local_gtirb.expected +++ b/src/test/correct/malloc_with_local/gcc/malloc_with_local_gtirb.expected @@ -18,7 +18,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2248bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -75,8 +75,8 @@ function {:extern} {:bvbuiltin "zero_extend 24"} 
zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2248bv64) == 131073bv64); free ensures (memory_load64_le(mem, 2256bv64) == 8241983568019286100bv64); free ensures (memory_load64_le(mem, 2264bv64) == 748482783423457568bv64); @@ -96,8 +96,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/malloc_with_local/gcc_O2/malloc_with_local.expected b/src/test/correct/malloc_with_local/gcc_O2/malloc_with_local.expected index 2b6826c22..715409413 100644 --- a/src/test/correct/malloc_with_local/gcc_O2/malloc_with_local.expected +++ b/src/test/correct/malloc_with_local/gcc_O2/malloc_with_local.expected @@ -43,8 +43,8 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2088bv64) == 131073bv64); free ensures (memory_load64_le(mem, 2096bv64) == 8241983568019286100bv64); free ensures (memory_load64_le(mem, 2104bv64) == 748482783423457568bv64); @@ -64,8 +64,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/malloc_with_local/gcc_O2/malloc_with_local_gtirb.expected b/src/test/correct/malloc_with_local/gcc_O2/malloc_with_local_gtirb.expected index 026f5518e..3b14ada56 100644 --- a/src/test/correct/malloc_with_local/gcc_O2/malloc_with_local_gtirb.expected +++ b/src/test/correct/malloc_with_local/gcc_O2/malloc_with_local_gtirb.expected @@ -20,7 +20,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2088bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -47,8 +47,8 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2088bv64) == 131073bv64); free ensures (memory_load64_le(mem, 2096bv64) == 8241983568019286100bv64); free ensures (memory_load64_le(mem, 2104bv64) == 748482783423457568bv64); @@ -68,8 +68,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/malloc_with_local2/clang/malloc_with_local2.expected b/src/test/correct/malloc_with_local2/clang/malloc_with_local2.expected index 9503872ed..b2883ffa9 100644 --- a/src/test/correct/malloc_with_local2/clang/malloc_with_local2.expected +++ b/src/test/correct/malloc_with_local2/clang/malloc_with_local2.expected @@ -22,7 +22,7 @@ var {:extern} mem: 
[bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2292bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -79,8 +79,8 @@ function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load8_le(mem, 2292bv64) == 1bv8); free ensures (memory_load8_le(mem, 2293bv64) == 0bv8); free ensures (memory_load8_le(mem, 2294bv64) == 2bv8); @@ -103,8 +103,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/malloc_with_local2/clang/malloc_with_local2_gtirb.expected b/src/test/correct/malloc_with_local2/clang/malloc_with_local2_gtirb.expected index 22e7f1347..076949b82 100644 --- a/src/test/correct/malloc_with_local2/clang/malloc_with_local2_gtirb.expected +++ b/src/test/correct/malloc_with_local2/clang/malloc_with_local2_gtirb.expected @@ -22,7 +22,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2292bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -79,8 +79,8 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load8_le(mem, 2292bv64) == 1bv8); free ensures (memory_load8_le(mem, 2293bv64) == 0bv8); free ensures (memory_load8_le(mem, 2294bv64) == 2bv8); @@ -103,8 +103,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/malloc_with_local2/gcc/malloc_with_local2.expected b/src/test/correct/malloc_with_local2/gcc/malloc_with_local2.expected index ba606abce..553af7a9e 100644 --- a/src/test/correct/malloc_with_local2/gcc/malloc_with_local2.expected +++ b/src/test/correct/malloc_with_local2/gcc/malloc_with_local2.expected @@ -18,7 +18,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2272bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -75,8 +75,8 @@ function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2272bv64) == 131073bv64); free ensures (memory_load64_le(mem, 
2280bv64) == 8241983568019286100bv64); free ensures (memory_load64_le(mem, 2288bv64) == 748482783423457568bv64); @@ -96,8 +96,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/malloc_with_local2/gcc/malloc_with_local2_gtirb.expected b/src/test/correct/malloc_with_local2/gcc/malloc_with_local2_gtirb.expected index 034a6ffd3..782535552 100644 --- a/src/test/correct/malloc_with_local2/gcc/malloc_with_local2_gtirb.expected +++ b/src/test/correct/malloc_with_local2/gcc/malloc_with_local2_gtirb.expected @@ -18,7 +18,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2272bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -75,8 +75,8 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2272bv64) == 131073bv64); free ensures (memory_load64_le(mem, 2280bv64) == 8241983568019286100bv64); free ensures (memory_load64_le(mem, 2288bv64) == 748482783423457568bv64); @@ -96,8 +96,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/malloc_with_local3/clang/malloc_with_local3.expected b/src/test/correct/malloc_with_local3/clang/malloc_with_local3.expected index f8b3fd019..e26d62293 100644 --- a/src/test/correct/malloc_with_local3/clang/malloc_with_local3.expected +++ b/src/test/correct/malloc_with_local3/clang/malloc_with_local3.expected @@ -22,7 +22,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2344bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -80,8 +80,8 @@ function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2344bv64) == 2334386691848142849bv64); free ensures (memory_load64_le(mem, 2352bv64) == 2322295453216173673bv64); free ensures (memory_load64_le(mem, 2360bv64) == 2334386691848692773bv64); @@ -103,8 +103,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/malloc_with_local3/clang/malloc_with_local3_gtirb.expected b/src/test/correct/malloc_with_local3/clang/malloc_with_local3_gtirb.expected index 508af9ec4..4cfaf99f2 100644 --- 
a/src/test/correct/malloc_with_local3/clang/malloc_with_local3_gtirb.expected +++ b/src/test/correct/malloc_with_local3/clang/malloc_with_local3_gtirb.expected @@ -22,7 +22,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2344bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -80,8 +80,8 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2344bv64) == 2334386691848142849bv64); free ensures (memory_load64_le(mem, 2352bv64) == 2322295453216173673bv64); free ensures (memory_load64_le(mem, 2360bv64) == 2334386691848692773bv64); @@ -103,8 +103,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/malloc_with_local3/gcc/malloc_with_local3.expected b/src/test/correct/malloc_with_local3/gcc/malloc_with_local3.expected index b090008a9..24cfa9582 100644 --- a/src/test/correct/malloc_with_local3/gcc/malloc_with_local3.expected +++ b/src/test/correct/malloc_with_local3/gcc/malloc_with_local3.expected @@ -18,7 +18,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2328bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -76,8 +76,8 @@ function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2328bv64) == 131073bv64); free ensures (memory_load64_le(mem, 2336bv64) == 2338615504306268244bv64); free ensures (memory_load64_le(mem, 2344bv64) == 2924860384375657bv64); @@ -101,8 +101,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/malloc_with_local3/gcc/malloc_with_local3_gtirb.expected b/src/test/correct/malloc_with_local3/gcc/malloc_with_local3_gtirb.expected index dfaf0a7b1..6206c7d40 100644 --- a/src/test/correct/malloc_with_local3/gcc/malloc_with_local3_gtirb.expected +++ b/src/test/correct/malloc_with_local3/gcc/malloc_with_local3_gtirb.expected @@ -18,7 +18,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2328bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -76,8 +76,8 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 
32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2328bv64) == 131073bv64); free ensures (memory_load64_le(mem, 2336bv64) == 2338615504306268244bv64); free ensures (memory_load64_le(mem, 2344bv64) == 2924860384375657bv64); @@ -101,8 +101,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/malloc_with_local3/gcc_O2/malloc_with_local3.expected b/src/test/correct/malloc_with_local3/gcc_O2/malloc_with_local3.expected index 7b2e9ba3f..6121cf2f2 100644 --- a/src/test/correct/malloc_with_local3/gcc_O2/malloc_with_local3.expected +++ b/src/test/correct/malloc_with_local3/gcc_O2/malloc_with_local3.expected @@ -24,7 +24,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2264bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -66,8 +66,8 @@ function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2264bv64) == 131073bv64); free ensures (memory_load64_le(mem, 2272bv64) == 8241983568019286100bv64); free ensures (memory_load64_le(mem, 2280bv64) == 7575166128241079840bv64); @@ -87,8 +87,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -368,6 +368,6 @@ implementation printCharValue() mem, Gamma_mem := memory_store8_le(mem, R3, R2[8:0]), gamma_store8(Gamma_mem, R3, Gamma_R2); assume {:captureState "%00000293"} true; call __printf_chk(); - assume false; //no return target + assume false; } diff --git a/src/test/correct/malloc_with_local3/gcc_O2/malloc_with_local3_gtirb.expected b/src/test/correct/malloc_with_local3/gcc_O2/malloc_with_local3_gtirb.expected index b26def393..86a87c185 100644 --- a/src/test/correct/malloc_with_local3/gcc_O2/malloc_with_local3_gtirb.expected +++ b/src/test/correct/malloc_with_local3/gcc_O2/malloc_with_local3_gtirb.expected @@ -24,7 +24,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2264bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -66,8 +66,8 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2264bv64) == 131073bv64); free ensures (memory_load64_le(mem, 2272bv64) == 8241983568019286100bv64); free ensures 
(memory_load64_le(mem, 2280bv64) == 7575166128241079840bv64); @@ -87,8 +87,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/multi_malloc/clang/multi_malloc.expected b/src/test/correct/multi_malloc/clang/multi_malloc.expected index c9b7066c9..8d34e535f 100644 --- a/src/test/correct/multi_malloc/clang/multi_malloc.expected +++ b/src/test/correct/multi_malloc/clang/multi_malloc.expected @@ -22,7 +22,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2232bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -79,8 +79,8 @@ function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2232bv64) == 2334386691848142849bv64); free ensures (memory_load64_le(mem, 2240bv64) == 4211825664600402019bv64); free ensures (memory_load64_le(mem, 2248bv64) == 7307182754559632672bv64); @@ -97,8 +97,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/multi_malloc/clang/multi_malloc_gtirb.expected b/src/test/correct/multi_malloc/clang/multi_malloc_gtirb.expected index 4cb699d0b..7481729ca 100644 --- a/src/test/correct/multi_malloc/clang/multi_malloc_gtirb.expected +++ b/src/test/correct/multi_malloc/clang/multi_malloc_gtirb.expected @@ -22,7 +22,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2232bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -79,8 +79,8 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2232bv64) == 2334386691848142849bv64); free ensures (memory_load64_le(mem, 2240bv64) == 4211825664600402019bv64); free ensures (memory_load64_le(mem, 2248bv64) == 7307182754559632672bv64); @@ -97,8 +97,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/multi_malloc/gcc/multi_malloc.expected b/src/test/correct/multi_malloc/gcc/multi_malloc.expected index f81950ff7..509eb4da3 100644 --- a/src/test/correct/multi_malloc/gcc/multi_malloc.expected +++ b/src/test/correct/multi_malloc/gcc/multi_malloc.expected @@ -18,7 +18,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const 
{:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2224bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -75,8 +75,8 @@ function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2224bv64) == 131073bv64); free ensures (memory_load64_le(mem, 2232bv64) == 8241983568019286100bv64); free ensures (memory_load64_le(mem, 2240bv64) == 748482783423457568bv64); @@ -90,8 +90,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/multi_malloc/gcc/multi_malloc_gtirb.expected b/src/test/correct/multi_malloc/gcc/multi_malloc_gtirb.expected index bddf81deb..72a908f31 100644 --- a/src/test/correct/multi_malloc/gcc/multi_malloc_gtirb.expected +++ b/src/test/correct/multi_malloc/gcc/multi_malloc_gtirb.expected @@ -18,7 +18,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 2224bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -75,8 +75,8 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); free ensures (memory_load64_le(mem, 2224bv64) == 131073bv64); free ensures (memory_load64_le(mem, 2232bv64) == 8241983568019286100bv64); free ensures (memory_load64_le(mem, 2240bv64) == 748482783423457568bv64); @@ -90,8 +90,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { diff --git a/src/test/correct/no_interference_update_x/clang/no_interference_update_x.expected b/src/test/correct/no_interference_update_x/clang/no_interference_update_x.expected index 8824e2170..489874491 100644 --- a/src/test/correct/no_interference_update_x/clang/no_interference_update_x.expected +++ b/src/test/correct/no_interference_update_x/clang/no_interference_update_x.expected @@ -10,7 +10,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $y_addr) then true else (if (index == $x_addr) then true else false)) } @@ -27,10 +27,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ 
memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -39,10 +35,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -77,19 +70,13 @@ procedure main(); modifies Gamma_R0, Gamma_R8, Gamma_R9, Gamma_mem, R0, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1852bv64) == 1bv8); - free requires (memory_load8_le(mem, 1853bv64) == 0bv8); - free requires (memory_load8_le(mem, 1854bv64) == 2bv8); - free requires (memory_load8_le(mem, 1855bv64) == 0bv8); + free requires (memory_load32_le(mem, 1852bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (memory_load32_le(mem, $x_addr) == 1bv32); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/no_interference_update_x/clang/no_interference_update_x_gtirb.expected b/src/test/correct/no_interference_update_x/clang/no_interference_update_x_gtirb.expected index a0daae130..cae7dacd2 100644 --- a/src/test/correct/no_interference_update_x/clang/no_interference_update_x_gtirb.expected +++ b/src/test/correct/no_interference_update_x/clang/no_interference_update_x_gtirb.expected @@ -10,7 +10,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $y_addr) then true else (if (index == $x_addr) then true else false)) } @@ -27,10 +27,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function 
{:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -39,10 +35,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -77,19 +70,13 @@ procedure main(); modifies Gamma_R0, Gamma_R8, Gamma_R9, Gamma_mem, R0, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1852bv64) == 1bv8); - free requires (memory_load8_le(mem, 1853bv64) == 0bv8); - free requires (memory_load8_le(mem, 1854bv64) == 2bv8); - free requires (memory_load8_le(mem, 1855bv64) == 0bv8); + free requires (memory_load32_le(mem, 1852bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (memory_load32_le(mem, $x_addr) == 1bv32); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/no_interference_update_x/clang_pic/no_interference_update_x.expected b/src/test/correct/no_interference_update_x/clang_pic/no_interference_update_x.expected index 0ba50c6e8..39c2b88dd 100644 --- a/src/test/correct/no_interference_update_x/clang_pic/no_interference_update_x.expected +++ b/src/test/correct/no_interference_update_x/clang_pic/no_interference_update_x.expected @@ -10,7 +10,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $y_addr) then true else (if (index == $x_addr) then true else false)) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: 
[bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,15 +39,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -82,25 +75,19 @@ procedure main(); modifies Gamma_R0, Gamma_R8, Gamma_R9, Gamma_mem, R0, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); free requires (memory_load64_le(mem, 69056bv64) == 1872bv64); free requires (memory_load64_le(mem, 69064bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (memory_load32_le(mem, $x_addr) == 1bv32); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/no_interference_update_x/clang_pic/no_interference_update_x_gtirb.expected b/src/test/correct/no_interference_update_x/clang_pic/no_interference_update_x_gtirb.expected index 51d3fd212..937fa0e13 100644 --- a/src/test/correct/no_interference_update_x/clang_pic/no_interference_update_x_gtirb.expected +++ 
b/src/test/correct/no_interference_update_x/clang_pic/no_interference_update_x_gtirb.expected @@ -10,7 +10,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $y_addr) then true else (if (index == $x_addr) then true else false)) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,15 +39,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -82,25 +75,19 @@ procedure main(); modifies Gamma_R0, Gamma_R8, Gamma_R9, Gamma_mem, R0, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); free requires (memory_load64_le(mem, 69056bv64) == 1872bv64); free requires (memory_load64_le(mem, 69064bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (memory_load32_le(mem, $x_addr) == 1bv32); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures 
(memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/no_interference_update_x/gcc/no_interference_update_x.expected b/src/test/correct/no_interference_update_x/gcc/no_interference_update_x.expected index 39715fe78..e88a5a93e 100644 --- a/src/test/correct/no_interference_update_x/gcc/no_interference_update_x.expected +++ b/src/test/correct/no_interference_update_x/gcc/no_interference_update_x.expected @@ -8,7 +8,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $y_addr) then true else (if (index == $x_addr) then true else false)) } @@ -25,10 +25,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -37,10 +33,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -75,19 +68,13 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (memory_load32_le(mem, $x_addr) == 1bv32); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 
0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/no_interference_update_x/gcc/no_interference_update_x_gtirb.expected b/src/test/correct/no_interference_update_x/gcc/no_interference_update_x_gtirb.expected index a7e28fd8a..a0f5daea6 100644 --- a/src/test/correct/no_interference_update_x/gcc/no_interference_update_x_gtirb.expected +++ b/src/test/correct/no_interference_update_x/gcc/no_interference_update_x_gtirb.expected @@ -8,7 +8,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $y_addr) then true else (if (index == $x_addr) then true else false)) } @@ -25,10 +25,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -37,10 +33,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -75,19 +68,13 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (memory_load32_le(mem, $x_addr) == 1bv32); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 
2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/no_interference_update_x/gcc_O2/no_interference_update_x.expected b/src/test/correct/no_interference_update_x/gcc_O2/no_interference_update_x.expected index 812b72308..4979c392b 100644 --- a/src/test/correct/no_interference_update_x/gcc_O2/no_interference_update_x.expected +++ b/src/test/correct/no_interference_update_x/gcc_O2/no_interference_update_x.expected @@ -10,7 +10,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $y_addr) then true else (if (index == $x_addr) then true else false)) } @@ -27,10 +27,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -39,10 +35,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -77,19 +70,13 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R2, Gamma_mem, R0, R1, R2, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (memory_load32_le(mem, $x_addr) == 1bv32); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) 
== 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/no_interference_update_x/gcc_O2/no_interference_update_x_gtirb.expected b/src/test/correct/no_interference_update_x/gcc_O2/no_interference_update_x_gtirb.expected index 5addc892a..e53c49059 100644 --- a/src/test/correct/no_interference_update_x/gcc_O2/no_interference_update_x_gtirb.expected +++ b/src/test/correct/no_interference_update_x/gcc_O2/no_interference_update_x_gtirb.expected @@ -10,7 +10,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $y_addr) then true else (if (index == $x_addr) then true else false)) } @@ -27,10 +27,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -39,10 +35,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -77,19 +70,13 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R2, Gamma_mem, R0, R1, R2, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (memory_load32_le(mem, $x_addr) == 1bv32); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures 
(memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/no_interference_update_x/gcc_pic/no_interference_update_x.expected b/src/test/correct/no_interference_update_x/gcc_pic/no_interference_update_x.expected index b3dff2519..fbd858afc 100644 --- a/src/test/correct/no_interference_update_x/gcc_pic/no_interference_update_x.expected +++ b/src/test/correct/no_interference_update_x/gcc_pic/no_interference_update_x.expected @@ -8,7 +8,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $y_addr) then true else (if (index == $x_addr) then true else false)) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -41,15 +37,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -80,25 +73,19 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); free requires 
(memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (memory_load32_le(mem, $x_addr) == 1bv32); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/no_interference_update_x/gcc_pic/no_interference_update_x_gtirb.expected b/src/test/correct/no_interference_update_x/gcc_pic/no_interference_update_x_gtirb.expected index 5714ee446..49ac0244c 100644 --- a/src/test/correct/no_interference_update_x/gcc_pic/no_interference_update_x_gtirb.expected +++ b/src/test/correct/no_interference_update_x/gcc_pic/no_interference_update_x_gtirb.expected @@ -8,7 +8,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $y_addr) then true else (if (index == $x_addr) then true else false)) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -41,15 +37,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures 
(memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -80,25 +73,19 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (memory_load32_le(mem, $x_addr) == 1bv32); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/no_interference_update_y/clang/no_interference_update_y.expected b/src/test/correct/no_interference_update_y/clang/no_interference_update_y.expected index 47005edc5..c730fada4 100644 --- a/src/test/correct/no_interference_update_y/clang/no_interference_update_y.expected +++ b/src/test/correct/no_interference_update_y/clang/no_interference_update_y.expected @@ -10,7 +10,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $y_addr) then true else (if (index == $x_addr) then true else false)) } @@ -27,10 +27,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := 
value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -39,10 +35,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -77,19 +70,13 @@ procedure main(); modifies Gamma_R0, Gamma_R8, Gamma_R9, Gamma_mem, R0, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1852bv64) == 1bv8); - free requires (memory_load8_le(mem, 1853bv64) == 0bv8); - free requires (memory_load8_le(mem, 1854bv64) == 2bv8); - free requires (memory_load8_le(mem, 1855bv64) == 0bv8); + free requires (memory_load32_le(mem, 1852bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (memory_load32_le(mem, $y_addr) == 1bv32); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/no_interference_update_y/clang/no_interference_update_y_gtirb.expected b/src/test/correct/no_interference_update_y/clang/no_interference_update_y_gtirb.expected index 9d04098e1..8bf937d63 100644 --- a/src/test/correct/no_interference_update_y/clang/no_interference_update_y_gtirb.expected +++ b/src/test/correct/no_interference_update_y/clang/no_interference_update_y_gtirb.expected @@ -10,7 +10,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $y_addr) then true else (if (index == $x_addr) then true else false)) } @@ -27,10 +27,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 
3bv64) := value[32:24]] } @@ -39,10 +35,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -77,19 +70,13 @@ procedure main(); modifies Gamma_R0, Gamma_R8, Gamma_R9, Gamma_mem, R0, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1852bv64) == 1bv8); - free requires (memory_load8_le(mem, 1853bv64) == 0bv8); - free requires (memory_load8_le(mem, 1854bv64) == 2bv8); - free requires (memory_load8_le(mem, 1855bv64) == 0bv8); + free requires (memory_load32_le(mem, 1852bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (memory_load32_le(mem, $y_addr) == 1bv32); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/no_interference_update_y/clang_pic/no_interference_update_y.expected b/src/test/correct/no_interference_update_y/clang_pic/no_interference_update_y.expected index 99d2305ec..01bd7a9e8 100644 --- a/src/test/correct/no_interference_update_y/clang_pic/no_interference_update_y.expected +++ b/src/test/correct/no_interference_update_y/clang_pic/no_interference_update_y.expected @@ -10,7 +10,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $y_addr) then true else (if (index == $x_addr) then true else false)) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,15 
+39,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -82,25 +75,19 @@ procedure main(); modifies Gamma_R0, Gamma_R8, Gamma_R9, Gamma_mem, R0, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); free requires (memory_load64_le(mem, 69056bv64) == 1872bv64); free requires (memory_load64_le(mem, 69064bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (memory_load32_le(mem, $y_addr) == 1bv32); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/no_interference_update_y/clang_pic/no_interference_update_y_gtirb.expected b/src/test/correct/no_interference_update_y/clang_pic/no_interference_update_y_gtirb.expected index ce221cf8e..9d3394cc3 100644 --- a/src/test/correct/no_interference_update_y/clang_pic/no_interference_update_y_gtirb.expected +++ b/src/test/correct/no_interference_update_y/clang_pic/no_interference_update_y_gtirb.expected @@ -10,7 +10,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $y_addr) then 
true else (if (index == $x_addr) then true else false)) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,15 +39,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -82,25 +75,19 @@ procedure main(); modifies Gamma_R0, Gamma_R8, Gamma_R9, Gamma_mem, R0, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); free requires (memory_load64_le(mem, 69056bv64) == 1872bv64); free requires (memory_load64_le(mem, 69064bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); ensures (memory_load32_le(mem, $y_addr) == 1bv32); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 
69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/no_interference_update_y/gcc/no_interference_update_y.expected b/src/test/correct/no_interference_update_y/gcc/no_interference_update_y.expected index 4e0b74c0f..707922203 100644 --- a/src/test/correct/no_interference_update_y/gcc/no_interference_update_y.expected +++ b/src/test/correct/no_interference_update_y/gcc/no_interference_update_y.expected @@ -8,7 +8,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $y_addr) then true else (if (index == $x_addr) then true else false)) } @@ -25,10 +25,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -37,10 +33,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -75,19 +68,13 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (memory_load32_le(mem, $y_addr) == 1bv32); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git 
a/src/test/correct/no_interference_update_y/gcc/no_interference_update_y_gtirb.expected b/src/test/correct/no_interference_update_y/gcc/no_interference_update_y_gtirb.expected index 524df54f5..046da73bd 100644 --- a/src/test/correct/no_interference_update_y/gcc/no_interference_update_y_gtirb.expected +++ b/src/test/correct/no_interference_update_y/gcc/no_interference_update_y_gtirb.expected @@ -8,7 +8,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $y_addr) then true else (if (index == $x_addr) then true else false)) } @@ -25,10 +25,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -37,10 +33,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -75,19 +68,13 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (memory_load32_le(mem, $y_addr) == 1bv32); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git 
a/src/test/correct/no_interference_update_y/gcc_O2/no_interference_update_y.expected b/src/test/correct/no_interference_update_y/gcc_O2/no_interference_update_y.expected index a45d9640a..2985f6029 100644 --- a/src/test/correct/no_interference_update_y/gcc_O2/no_interference_update_y.expected +++ b/src/test/correct/no_interference_update_y/gcc_O2/no_interference_update_y.expected @@ -10,7 +10,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69656bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $y_addr) then true else (if (index == $x_addr) then true else false)) } @@ -27,10 +27,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -39,10 +35,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -77,19 +70,13 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R2, Gamma_mem, R0, R1, R2, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (memory_load32_le(mem, $y_addr) == 1bv32); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git 
a/src/test/correct/no_interference_update_y/gcc_O2/no_interference_update_y_gtirb.expected b/src/test/correct/no_interference_update_y/gcc_O2/no_interference_update_y_gtirb.expected index 873279301..1b0e1ce61 100644 --- a/src/test/correct/no_interference_update_y/gcc_O2/no_interference_update_y_gtirb.expected +++ b/src/test/correct/no_interference_update_y/gcc_O2/no_interference_update_y_gtirb.expected @@ -10,7 +10,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69656bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $y_addr) then true else (if (index == $x_addr) then true else false)) } @@ -27,10 +27,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -39,10 +35,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -77,19 +70,13 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R2, Gamma_mem, R0, R1, R2, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (memory_load32_le(mem, $y_addr) == 1bv32); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git 
a/src/test/correct/no_interference_update_y/gcc_pic/no_interference_update_y.expected b/src/test/correct/no_interference_update_y/gcc_pic/no_interference_update_y.expected index 8d57819eb..72fb704a7 100644 --- a/src/test/correct/no_interference_update_y/gcc_pic/no_interference_update_y.expected +++ b/src/test/correct/no_interference_update_y/gcc_pic/no_interference_update_y.expected @@ -8,7 +8,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $y_addr) then true else (if (index == $x_addr) then true else false)) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -41,15 +37,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -80,25 +73,19 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); + free requires 
(memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (memory_load32_le(mem, $y_addr) == 1bv32); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/no_interference_update_y/gcc_pic/no_interference_update_y_gtirb.expected b/src/test/correct/no_interference_update_y/gcc_pic/no_interference_update_y_gtirb.expected index b9ffef1ce..2dc3bd3bb 100644 --- a/src/test/correct/no_interference_update_y/gcc_pic/no_interference_update_y_gtirb.expected +++ b/src/test/correct/no_interference_update_y/gcc_pic/no_interference_update_y_gtirb.expected @@ -8,7 +8,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $y_addr: bv64; axiom ($y_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $y_addr) then true else (if (index == $x_addr) then true else false)) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -41,15 +37,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $y_addr) == old(memory_load32_le(mem, $y_addr))); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -80,25 +73,19 @@ 
procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); ensures (memory_load32_le(mem, $y_addr) == 1bv32); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/secret_write/clang/secret_write.expected b/src/test/correct/secret_write/clang/secret_write.expected index 9fd104c05..ff731a250 100644 --- a/src/test/correct/secret_write/clang/secret_write.expected +++ b/src/test/correct/secret_write/clang/secret_write.expected @@ -14,8 +14,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69692bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(memory, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(mem$in, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -38,10 +38,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 
2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -52,10 +48,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -96,18 +89,12 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 0bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1892bv64) == 1bv8); - free requires (memory_load8_le(mem, 1893bv64) == 0bv8); - free requires (memory_load8_le(mem, 1894bv64) == 2bv8); - free requires (memory_load8_le(mem, 1895bv64) == 0bv8); + free requires (memory_load32_le(mem, 1892bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/secret_write/clang/secret_write_gtirb.expected b/src/test/correct/secret_write/clang/secret_write_gtirb.expected index 6c22b7f34..e9d02dcdd 100644 --- a/src/test/correct/secret_write/clang/secret_write_gtirb.expected +++ b/src/test/correct/secret_write/clang/secret_write_gtirb.expected @@ -14,8 +14,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69692bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(memory, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(mem$in, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -38,10 +38,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, 
index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -52,10 +48,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -96,18 +89,12 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 0bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1892bv64) == 1bv8); - free requires (memory_load8_le(mem, 1893bv64) == 0bv8); - free requires (memory_load8_le(mem, 1894bv64) == 2bv8); - free requires (memory_load8_le(mem, 1895bv64) == 0bv8); + free requires (memory_load32_le(mem, 1892bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/secret_write/clang_O2/secret_write.expected b/src/test/correct/secret_write/clang_O2/secret_write.expected index a9048843f..48a428eaa 100644 --- a/src/test/correct/secret_write/clang_O2/secret_write.expected +++ b/src/test/correct/secret_write/clang_O2/secret_write.expected @@ -14,8 +14,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69692bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(memory, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(mem$in, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -37,10 +37,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 
5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -50,10 +46,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -94,18 +87,12 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 0bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1860bv64) == 1bv8); - free requires (memory_load8_le(mem, 1861bv64) == 0bv8); - free requires (memory_load8_le(mem, 1862bv64) == 2bv8); - free requires (memory_load8_le(mem, 1863bv64) == 0bv8); + free requires (memory_load32_le(mem, 1860bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/secret_write/clang_O2/secret_write_gtirb.expected b/src/test/correct/secret_write/clang_O2/secret_write_gtirb.expected index 8e15ef31b..289dadb93 100644 --- a/src/test/correct/secret_write/clang_O2/secret_write_gtirb.expected +++ b/src/test/correct/secret_write/clang_O2/secret_write_gtirb.expected @@ -14,8 +14,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69692bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(memory, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(mem$in, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) } function {:extern} 
{:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -37,10 +37,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -50,10 +46,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -94,18 +87,12 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 0bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1860bv64) == 1bv8); - free requires (memory_load8_le(mem, 1861bv64) == 0bv8); - free requires (memory_load8_le(mem, 1862bv64) == 2bv8); - free requires (memory_load8_le(mem, 1863bv64) == 0bv8); + free requires (memory_load32_le(mem, 1860bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1860bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1861bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1862bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1863bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1860bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/secret_write/clang_pic/secret_write.expected b/src/test/correct/secret_write/clang_pic/secret_write.expected index ec9017dde..c6a6d5878 100644 --- a/src/test/correct/secret_write/clang_pic/secret_write.expected +++ b/src/test/correct/secret_write/clang_pic/secret_write.expected @@ -14,8 +14,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69692bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(memory, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) 
returns (bool) { + (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(mem$in, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -42,10 +42,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -56,17 +52,14 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 1968bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1969bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1970bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1971bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69048bv64) == 1792bv64); + free ensures (memory_load32_le(mem, 1968bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69040bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69048bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69560bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69692bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69560bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -103,28 +96,22 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 0bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1968bv64) == 1bv8); - free requires (memory_load8_le(mem, 1969bv64) == 0bv8); - free requires (memory_load8_le(mem, 1970bv64) == 2bv8); - free requires (memory_load8_le(mem, 1971bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); - free requires (memory_load64_le(mem, 69048bv64) == 1792bv64); + free requires (memory_load32_le(mem, 1968bv64) == 131073bv32); free requires (memory_load64_le(mem, 69040bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69048bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69560bv64) == 69684bv64); free requires (memory_load64_le(mem, 69568bv64) == 69692bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69560bv64) == 69684bv64); - free ensures (memory_load8_le(mem, 
1968bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1969bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1970bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1971bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69048bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1968bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69040bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69048bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69560bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69692bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69560bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/secret_write/clang_pic/secret_write_gtirb.expected b/src/test/correct/secret_write/clang_pic/secret_write_gtirb.expected index 848e9040c..5d1c612d0 100644 --- a/src/test/correct/secret_write/clang_pic/secret_write_gtirb.expected +++ b/src/test/correct/secret_write/clang_pic/secret_write_gtirb.expected @@ -14,8 +14,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69692bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(memory, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(mem$in, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -42,10 +42,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -56,17 +52,14 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 1968bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1969bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1970bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1971bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69048bv64) == 1792bv64); 
+ free ensures (memory_load32_le(mem, 1968bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69040bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69048bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69560bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69692bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69560bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -103,28 +96,22 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 0bv32); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1968bv64) == 1bv8); - free requires (memory_load8_le(mem, 1969bv64) == 0bv8); - free requires (memory_load8_le(mem, 1970bv64) == 2bv8); - free requires (memory_load8_le(mem, 1971bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); - free requires (memory_load64_le(mem, 69048bv64) == 1792bv64); + free requires (memory_load32_le(mem, 1968bv64) == 131073bv32); free requires (memory_load64_le(mem, 69040bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69048bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69560bv64) == 69684bv64); free requires (memory_load64_le(mem, 69568bv64) == 69692bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69560bv64) == 69684bv64); - free ensures (memory_load8_le(mem, 1968bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1969bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1970bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1971bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); - free ensures (memory_load64_le(mem, 69048bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1968bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69040bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69048bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69560bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69692bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69560bv64) == 69684bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/secret_write/gcc/secret_write.expected b/src/test/correct/secret_write/gcc/secret_write.expected index c0e03ae6f..af58dd477 100644 --- a/src/test/correct/secret_write/gcc/secret_write.expected +++ b/src/test/correct/secret_write/gcc/secret_write.expected @@ -10,8 +10,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69656bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(memory, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) +function 
{:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(mem$in, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -34,10 +34,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -48,10 +44,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 1944bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1945bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1946bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1947bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1944bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -92,18 +85,12 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1944bv64) == 1bv8); - free requires (memory_load8_le(mem, 1945bv64) == 0bv8); - free requires (memory_load8_le(mem, 1946bv64) == 2bv8); - free requires (memory_load8_le(mem, 1947bv64) == 0bv8); + free requires (memory_load32_le(mem, 1944bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1944bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1945bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1946bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1947bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1944bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/secret_write/gcc/secret_write_gtirb.expected b/src/test/correct/secret_write/gcc/secret_write_gtirb.expected index 38bbdd467..29dca0c16 100644 --- a/src/test/correct/secret_write/gcc/secret_write_gtirb.expected +++ b/src/test/correct/secret_write/gcc/secret_write_gtirb.expected @@ -10,8 +10,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69656bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, 
index: bv64) returns (bool) { - (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(memory, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(mem$in, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -34,10 +34,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -48,10 +44,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 1944bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1945bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1946bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1947bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1944bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -92,18 +85,12 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1944bv64) == 1bv8); - free requires (memory_load8_le(mem, 1945bv64) == 0bv8); - free requires (memory_load8_le(mem, 1946bv64) == 2bv8); - free requires (memory_load8_le(mem, 1947bv64) == 0bv8); + free requires (memory_load32_le(mem, 1944bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1944bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1945bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1946bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1947bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1944bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/secret_write/gcc_O2/secret_write.expected b/src/test/correct/secret_write/gcc_O2/secret_write.expected index 0f26390fb..543cf34f9 100644 --- a/src/test/correct/secret_write/gcc_O2/secret_write.expected +++ 
b/src/test/correct/secret_write/gcc_O2/secret_write.expected @@ -14,8 +14,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(memory, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(mem$in, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -37,10 +37,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -50,10 +46,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -94,18 +87,12 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git 
a/src/test/correct/secret_write/gcc_O2/secret_write_gtirb.expected b/src/test/correct/secret_write/gcc_O2/secret_write_gtirb.expected index 7af63a49b..84db56744 100644 --- a/src/test/correct/secret_write/gcc_O2/secret_write_gtirb.expected +++ b/src/test/correct/secret_write/gcc_O2/secret_write_gtirb.expected @@ -14,8 +14,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(memory, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(mem$in, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -37,10 +37,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -50,10 +46,7 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -94,18 +87,12 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures 
(memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/secret_write/gcc_pic/secret_write.expected b/src/test/correct/secret_write/gcc_pic/secret_write.expected index 64d5aff6b..1b1aa27ca 100644 --- a/src/test/correct/secret_write/gcc_pic/secret_write.expected +++ b/src/test/correct/secret_write/gcc_pic/secret_write.expected @@ -10,8 +10,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69656bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(memory, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(mem$in, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -38,10 +38,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -52,17 +48,14 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 2008bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2009bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2010bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2011bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69660bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 2008bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 68992bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69000bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69584bv64) == 69652bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1792bv64); - free ensures (memory_load64_le(mem, 68992bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69660bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -99,28 +92,22 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 
0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 2008bv64) == 1bv8); - free requires (memory_load8_le(mem, 2009bv64) == 0bv8); - free requires (memory_load8_le(mem, 2010bv64) == 2bv8); - free requires (memory_load8_le(mem, 2011bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69660bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 2008bv64) == 131073bv32); + free requires (memory_load64_le(mem, 68992bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69000bv64) == 1792bv64); free requires (memory_load64_le(mem, 69584bv64) == 69652bv64); free requires (memory_load64_le(mem, 69592bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1792bv64); - free requires (memory_load64_le(mem, 68992bv64) == 1872bv64); - free ensures (memory_load8_le(mem, 2008bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2009bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2010bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2011bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69660bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69660bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); + free ensures (memory_load32_le(mem, 2008bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 68992bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69000bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69584bv64) == 69652bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1792bv64); - free ensures (memory_load64_le(mem, 68992bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69660bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/secret_write/gcc_pic/secret_write_gtirb.expected b/src/test/correct/secret_write/gcc_pic/secret_write_gtirb.expected index e7f9eb3a1..9ad582f9c 100644 --- a/src/test/correct/secret_write/gcc_pic/secret_write_gtirb.expected +++ b/src/test/correct/secret_write/gcc_pic/secret_write_gtirb.expected @@ -10,8 +10,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69656bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(memory, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $secret_addr) then false else (if (index == $x_addr) then (bvsmod32(memory_load32_le(mem$in, $z_addr), 2bv32) == 0bv32) else (if (index == $z_addr) then true else false))) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -38,10 +38,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ 
(memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -52,17 +48,14 @@ procedure {:extern} rely(); ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $z_addr) == old(memory_load32_le(mem, $z_addr))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 2008bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2009bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2010bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2011bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69660bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 2008bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 68992bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69000bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69584bv64) == 69652bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1792bv64); - free ensures (memory_load64_le(mem, 68992bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69660bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -99,28 +92,22 @@ procedure main(); requires (memory_load32_le(mem, $z_addr) == 0bv32); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 2008bv64) == 1bv8); - free requires (memory_load8_le(mem, 2009bv64) == 0bv8); - free requires (memory_load8_le(mem, 2010bv64) == 2bv8); - free requires (memory_load8_le(mem, 2011bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69660bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 2008bv64) == 131073bv32); + free requires (memory_load64_le(mem, 68992bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69000bv64) == 1792bv64); free requires (memory_load64_le(mem, 69584bv64) == 69652bv64); free requires (memory_load64_le(mem, 69592bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1792bv64); - free requires (memory_load64_le(mem, 68992bv64) == 1872bv64); - free ensures (memory_load8_le(mem, 2008bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2009bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2010bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2011bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69660bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69660bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) 
== 69640bv64); + free ensures (memory_load32_le(mem, 2008bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 68992bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69000bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69584bv64) == 69652bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1792bv64); - free ensures (memory_load64_le(mem, 68992bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69660bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/switch/clang/switch.expected b/src/test/correct/switch/clang/switch.expected index f6a079d18..71ad0b07a 100644 --- a/src/test/correct/switch/clang/switch.expected +++ b/src/test/correct/switch/clang/switch.expected @@ -39,10 +39,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -52,12 +48,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1936bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1937bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1938bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1939bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1936bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -65,8 +58,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -83,20 +76,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R31, Gamma_R8, Gamma_VF, Gamma_ZF, Gamma_stack, NF, R31, R8, VF, ZF, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1936bv64) == 1bv8); - free requires (memory_load8_le(mem, 1937bv64) == 0bv8); - free requires (memory_load8_le(mem, 1938bv64) == 2bv8); - free requires (memory_load8_le(mem, 1939bv64) == 0bv8); + free requires (memory_load32_le(mem, 1936bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == 
old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1936bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1937bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1938bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1939bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1936bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/switch/clang/switch_gtirb.expected b/src/test/correct/switch/clang/switch_gtirb.expected index bbfae8da5..9a0c4ad46 100644 --- a/src/test/correct/switch/clang/switch_gtirb.expected +++ b/src/test/correct/switch/clang/switch_gtirb.expected @@ -38,10 +38,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -51,12 +47,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1936bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1937bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1938bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1939bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1936bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -64,8 +57,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -82,20 +75,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R31, Gamma_R8, Gamma_VF, Gamma_ZF, Gamma_stack, NF, R31, R8, VF, ZF, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1936bv64) == 1bv8); - free requires (memory_load8_le(mem, 1937bv64) == 0bv8); - free requires (memory_load8_le(mem, 1938bv64) == 2bv8); - free requires (memory_load8_le(mem, 1939bv64) == 0bv8); + free requires (memory_load32_le(mem, 1936bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1936bv64) == 1bv8); - free ensures 
(memory_load8_le(mem, 1937bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1938bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1939bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1936bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/switch/clang_O2/switch.expected b/src/test/correct/switch/clang_O2/switch.expected index 074ebb121..63ab24607 100644 --- a/src/test/correct/switch/clang_O2/switch.expected +++ b/src/test/correct/switch/clang_O2/switch.expected @@ -3,22 +3,19 @@ var {:extern} mem: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 1836bv64); function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1836bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1837bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1838bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1839bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1836bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -26,8 +23,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -43,18 +40,12 @@ procedure {:extern} guarantee_reflexive(); procedure main(); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1836bv64) == 1bv8); - free requires (memory_load8_le(mem, 1837bv64) == 0bv8); - free requires (memory_load8_le(mem, 1838bv64) == 2bv8); - free requires (memory_load8_le(mem, 1839bv64) == 0bv8); + free requires (memory_load32_le(mem, 1836bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1836bv64) == 
1bv8); - free ensures (memory_load8_le(mem, 1837bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1838bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1839bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1836bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/switch/clang_O2/switch_gtirb.expected b/src/test/correct/switch/clang_O2/switch_gtirb.expected index 1f07e5df7..6197b5d11 100644 --- a/src/test/correct/switch/clang_O2/switch_gtirb.expected +++ b/src/test/correct/switch/clang_O2/switch_gtirb.expected @@ -3,22 +3,19 @@ var {:extern} mem: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 1836bv64); function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1836bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1837bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1838bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1839bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1836bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -26,8 +23,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -43,18 +40,12 @@ procedure {:extern} guarantee_reflexive(); procedure main(); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1836bv64) == 1bv8); - free requires (memory_load8_le(mem, 1837bv64) == 0bv8); - free requires (memory_load8_le(mem, 1838bv64) == 2bv8); - free requires (memory_load8_le(mem, 1839bv64) == 0bv8); + free requires (memory_load32_le(mem, 1836bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free 
ensures (memory_load8_le(mem, 1836bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1837bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1838bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1839bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1836bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/switch/gcc/switch.expected b/src/test/correct/switch/gcc/switch.expected index af33e269d..b13f88a63 100644 --- a/src/test/correct/switch/gcc/switch.expected +++ b/src/test/correct/switch/gcc/switch.expected @@ -39,10 +39,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -52,12 +48,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -65,8 +58,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -83,20 +76,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_stack, NF, R0, R31, VF, ZF, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1916bv64) == 1bv8); - free requires (memory_load8_le(mem, 1917bv64) == 0bv8); - free requires (memory_load8_le(mem, 1918bv64) == 2bv8); - free requires (memory_load8_le(mem, 1919bv64) == 0bv8); + free requires (memory_load32_le(mem, 1916bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); 
- free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/switch/gcc/switch_gtirb.expected b/src/test/correct/switch/gcc/switch_gtirb.expected index 36195f0ef..2d439a7c9 100644 --- a/src/test/correct/switch/gcc/switch_gtirb.expected +++ b/src/test/correct/switch/gcc/switch_gtirb.expected @@ -38,10 +38,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -51,12 +47,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -64,8 +57,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -82,20 +75,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_stack, NF, R0, R31, VF, ZF, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1916bv64) == 1bv8); - free requires (memory_load8_le(mem, 1917bv64) == 0bv8); - free requires (memory_load8_le(mem, 1918bv64) == 2bv8); - free requires (memory_load8_le(mem, 1919bv64) == 0bv8); + free requires (memory_load32_le(mem, 1916bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures 
(memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/switch/gcc_O2/switch.expected b/src/test/correct/switch/gcc_O2/switch.expected index f5fe2a468..a1aa0c66c 100644 --- a/src/test/correct/switch/gcc_O2/switch.expected +++ b/src/test/correct/switch/gcc_O2/switch.expected @@ -3,22 +3,19 @@ var {:extern} mem: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 1896bv64); function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -26,8 +23,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -43,18 +40,12 @@ procedure {:extern} guarantee_reflexive(); procedure main(); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); 
free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/switch/gcc_O2/switch_gtirb.expected b/src/test/correct/switch/gcc_O2/switch_gtirb.expected index a87f9b510..aca746161 100644 --- a/src/test/correct/switch/gcc_O2/switch_gtirb.expected +++ b/src/test/correct/switch/gcc_O2/switch_gtirb.expected @@ -3,22 +3,19 @@ var {:extern} mem: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 1896bv64); function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -26,8 +23,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -43,18 +40,12 @@ procedure {:extern} guarantee_reflexive(); procedure main(); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures 
(memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/correct/syscall/clang/syscall.expected b/src/test/correct/syscall/clang/syscall.expected index 54a9d6eea..d7e0403c0 100644 --- a/src/test/correct/syscall/clang/syscall.expected +++ b/src/test/correct/syscall/clang/syscall.expected @@ -43,10 +43,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -58,12 +54,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1944bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1945bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1946bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1947bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1944bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); @@ -71,8 +64,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -87,18 +80,12 @@ procedure {:extern} guarantee_reflexive(); procedure fork(); modifies Gamma_R16, Gamma_R17, R16, R17; - free requires (memory_load8_le(mem, 1944bv64) == 1bv8); - free requires (memory_load8_le(mem, 1945bv64) == 0bv8); - free requires (memory_load8_le(mem, 1946bv64) == 2bv8); - free requires (memory_load8_le(mem, 1947bv64) == 0bv8); + free requires (memory_load32_le(mem, 1944bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1872bv64); free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); free requires (memory_load64_le(mem, 69680bv64) == 69680bv64); - free ensures (memory_load8_le(mem, 1944bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1945bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1946bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1947bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1944bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); @@ -108,10 +95,7 @@ procedure main(); modifies Gamma_R0, Gamma_R16, Gamma_R17, Gamma_R29, Gamma_R30, Gamma_R31, Gamma_stack, R0, R16, R17, R29, 
R30, R31, stack; free requires (memory_load64_le(mem, 69672bv64) == 0bv64); free requires (memory_load64_le(mem, 69680bv64) == 69680bv64); - free requires (memory_load8_le(mem, 1944bv64) == 1bv8); - free requires (memory_load8_le(mem, 1945bv64) == 0bv8); - free requires (memory_load8_le(mem, 1946bv64) == 2bv8); - free requires (memory_load8_le(mem, 1947bv64) == 0bv8); + free requires (memory_load32_le(mem, 1944bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1872bv64); free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); @@ -120,10 +104,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1944bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1945bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1946bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1947bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1944bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); diff --git a/src/test/correct/syscall/clang/syscall_gtirb.expected b/src/test/correct/syscall/clang/syscall_gtirb.expected index 1b662d317..40bab66af 100644 --- a/src/test/correct/syscall/clang/syscall_gtirb.expected +++ b/src/test/correct/syscall/clang/syscall_gtirb.expected @@ -18,7 +18,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 1944bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -47,10 +47,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -62,12 +58,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1944bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1945bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1946bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1947bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1944bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); @@ -75,8 +68,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures 
(mem == old(mem)); implementation {:extern} rely_transitive() { @@ -93,10 +86,7 @@ procedure main(); modifies Gamma_R0, Gamma_R16, Gamma_R17, Gamma_R29, Gamma_R30, Gamma_R31, Gamma_mem, Gamma_stack, R0, R16, R17, R29, R30, R31, mem, stack; free requires (memory_load64_le(mem, 69672bv64) == 0bv64); free requires (memory_load64_le(mem, 69680bv64) == 69680bv64); - free requires (memory_load8_le(mem, 1944bv64) == 1bv8); - free requires (memory_load8_le(mem, 1945bv64) == 0bv8); - free requires (memory_load8_le(mem, 1946bv64) == 2bv8); - free requires (memory_load8_le(mem, 1947bv64) == 0bv8); + free requires (memory_load32_le(mem, 1944bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1872bv64); free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); @@ -105,10 +95,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1944bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1945bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1946bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1947bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1944bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); @@ -155,18 +142,12 @@ implementation main() procedure FUN_610(); modifies Gamma_R16, Gamma_R17, Gamma_mem, R16, R17, mem; - free requires (memory_load8_le(mem, 1944bv64) == 1bv8); - free requires (memory_load8_le(mem, 1945bv64) == 0bv8); - free requires (memory_load8_le(mem, 1946bv64) == 2bv8); - free requires (memory_load8_le(mem, 1947bv64) == 0bv8); + free requires (memory_load32_le(mem, 1944bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1872bv64); free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); free requires (memory_load64_le(mem, 69680bv64) == 69680bv64); - free ensures (memory_load8_le(mem, 1944bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1945bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1946bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1947bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1944bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); @@ -185,18 +166,12 @@ implementation FUN_610() } procedure fork(); - free requires (memory_load8_le(mem, 1944bv64) == 1bv8); - free requires (memory_load8_le(mem, 1945bv64) == 0bv8); - free requires (memory_load8_le(mem, 1946bv64) == 2bv8); - free requires (memory_load8_le(mem, 1947bv64) == 0bv8); + free requires (memory_load32_le(mem, 1944bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1872bv64); free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); free requires (memory_load64_le(mem, 69680bv64) == 69680bv64); - free ensures (memory_load8_le(mem, 1944bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1945bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1946bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1947bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1944bv64) == 131073bv32); free ensures (memory_load64_le(mem, 
69064bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); diff --git a/src/test/correct/syscall/clang_O2/syscall_gtirb.expected b/src/test/correct/syscall/clang_O2/syscall_gtirb.expected index 0c9d7d48d..4ce3e54e5 100644 --- a/src/test/correct/syscall/clang_O2/syscall_gtirb.expected +++ b/src/test/correct/syscall/clang_O2/syscall_gtirb.expected @@ -6,7 +6,7 @@ var {:extern} R17: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 1900bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -15,22 +15,19 @@ function {:extern} gamma_load64(gammaMap: [bv64]bool, index: bv64) returns (bool (gammaMap[bvadd64(index, 7bv64)] && (gammaMap[bvadd64(index, 6bv64)] && (gammaMap[bvadd64(index, 5bv64)] && (gammaMap[bvadd64(index, 4bv64)] && (gammaMap[bvadd64(index, 3bv64)] && (gammaMap[bvadd64(index, 2bv64)] && (gammaMap[bvadd64(index, 1bv64)] && gammaMap[index]))))))) } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1900bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1901bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1902bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1903bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1900bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); @@ -38,8 +35,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -56,18 +53,12 @@ procedure main(); modifies Gamma_R16, Gamma_R17, Gamma_mem, R16, R17, mem; free requires (memory_load64_le(mem, 69672bv64) == 0bv64); free requires (memory_load64_le(mem, 69680bv64) == 69680bv64); - free requires (memory_load8_le(mem, 1900bv64) == 1bv8); - free requires (memory_load8_le(mem, 1901bv64) == 0bv8); - free requires (memory_load8_le(mem, 1902bv64) == 2bv8); - free requires (memory_load8_le(mem, 1903bv64) == 0bv8); + free requires (memory_load32_le(mem, 1900bv64) == 131073bv32); free requires (memory_load64_le(mem, 
69064bv64) == 1872bv64); free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); free requires (memory_load64_le(mem, 69680bv64) == 69680bv64); - free ensures (memory_load8_le(mem, 1900bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1901bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1902bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1903bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1900bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); @@ -83,18 +74,12 @@ implementation main() procedure .L_610(); modifies Gamma_R16, Gamma_R17, Gamma_mem, R16, R17, mem; - free requires (memory_load8_le(mem, 1900bv64) == 1bv8); - free requires (memory_load8_le(mem, 1901bv64) == 0bv8); - free requires (memory_load8_le(mem, 1902bv64) == 2bv8); - free requires (memory_load8_le(mem, 1903bv64) == 0bv8); + free requires (memory_load32_le(mem, 1900bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1872bv64); free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); free requires (memory_load64_le(mem, 69680bv64) == 69680bv64); - free ensures (memory_load8_le(mem, 1900bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1901bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1902bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1903bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1900bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); @@ -113,18 +98,12 @@ implementation .L_610() } procedure fork(); - free requires (memory_load8_le(mem, 1900bv64) == 1bv8); - free requires (memory_load8_le(mem, 1901bv64) == 0bv8); - free requires (memory_load8_le(mem, 1902bv64) == 2bv8); - free requires (memory_load8_le(mem, 1903bv64) == 0bv8); + free requires (memory_load32_le(mem, 1900bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1872bv64); free requires (memory_load64_le(mem, 69072bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); free requires (memory_load64_le(mem, 69680bv64) == 69680bv64); - free ensures (memory_load8_le(mem, 1900bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1901bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1902bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1903bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1900bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); diff --git a/src/test/correct/syscall/gcc/syscall.expected b/src/test/correct/syscall/gcc/syscall.expected index eee352c79..9b5a6bfb5 100644 --- a/src/test/correct/syscall/gcc/syscall.expected +++ b/src/test/correct/syscall/gcc/syscall.expected @@ -43,10 +43,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} 
memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -58,12 +54,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); @@ -71,8 +64,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -87,18 +80,12 @@ procedure {:extern} guarantee_reflexive(); procedure fork(); modifies Gamma_R16, Gamma_R17, R16, R17; - free requires (memory_load8_le(mem, 1932bv64) == 1bv8); - free requires (memory_load8_le(mem, 1933bv64) == 0bv8); - free requires (memory_load8_le(mem, 1934bv64) == 2bv8); - free requires (memory_load8_le(mem, 1935bv64) == 0bv8); + free requires (memory_load32_le(mem, 1932bv64) == 131073bv32); free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); @@ -108,10 +95,7 @@ procedure main(); modifies Gamma_R0, Gamma_R16, Gamma_R17, Gamma_R29, Gamma_R30, Gamma_R31, Gamma_stack, R0, R16, R17, R29, R30, R31, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1932bv64) == 1bv8); - free requires (memory_load8_le(mem, 1933bv64) == 0bv8); - free requires (memory_load8_le(mem, 1934bv64) == 2bv8); - free requires (memory_load8_le(mem, 1935bv64) == 0bv8); + free requires (memory_load32_le(mem, 1932bv64) == 131073bv32); free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); @@ -120,10 +104,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 
1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); diff --git a/src/test/correct/syscall/gcc/syscall_gtirb.expected b/src/test/correct/syscall/gcc/syscall_gtirb.expected index bff162b41..56a484a3f 100644 --- a/src/test/correct/syscall/gcc/syscall_gtirb.expected +++ b/src/test/correct/syscall/gcc/syscall_gtirb.expected @@ -18,7 +18,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 1932bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -47,10 +47,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -62,12 +58,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); @@ -75,8 +68,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -91,18 +84,12 @@ procedure {:extern} guarantee_reflexive(); procedure FUN_610(); modifies Gamma_R16, Gamma_R17, Gamma_mem, R16, R17, mem; - free requires (memory_load8_le(mem, 1932bv64) == 1bv8); - free requires (memory_load8_le(mem, 1933bv64) == 0bv8); - free requires (memory_load8_le(mem, 1934bv64) == 2bv8); - free requires (memory_load8_le(mem, 1935bv64) == 0bv8); + free requires (memory_load32_le(mem, 1932bv64) == 131073bv32); free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free 
ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); @@ -124,10 +111,7 @@ procedure main(); modifies Gamma_R0, Gamma_R16, Gamma_R17, Gamma_R29, Gamma_R30, Gamma_R31, Gamma_mem, Gamma_stack, R0, R16, R17, R29, R30, R31, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1932bv64) == 1bv8); - free requires (memory_load8_le(mem, 1933bv64) == 0bv8); - free requires (memory_load8_le(mem, 1934bv64) == 2bv8); - free requires (memory_load8_le(mem, 1935bv64) == 0bv8); + free requires (memory_load32_le(mem, 1932bv64) == 131073bv32); free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); @@ -136,10 +120,7 @@ procedure main(); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R29 == old(R29)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); @@ -180,18 +161,12 @@ implementation main() } procedure fork(); - free requires (memory_load8_le(mem, 1932bv64) == 1bv8); - free requires (memory_load8_le(mem, 1933bv64) == 0bv8); - free requires (memory_load8_le(mem, 1934bv64) == 2bv8); - free requires (memory_load8_le(mem, 1935bv64) == 0bv8); + free requires (memory_load32_le(mem, 1932bv64) == 131073bv32); free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); diff --git a/src/test/correct/syscall/gcc_O2/syscall.expected b/src/test/correct/syscall/gcc_O2/syscall.expected index fcb66403d..fef381e05 100644 --- a/src/test/correct/syscall/gcc_O2/syscall.expected +++ b/src/test/correct/syscall/gcc_O2/syscall.expected @@ -7,22 +7,19 @@ var {:extern} mem: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 1960bv64); function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ 
(memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1960bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1961bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1962bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1963bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1960bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69008bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1600bv64); @@ -30,8 +27,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -48,18 +45,12 @@ procedure fork(); modifies Gamma_R16, Gamma_R17, R16, R17; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1960bv64) == 1bv8); - free requires (memory_load8_le(mem, 1961bv64) == 0bv8); - free requires (memory_load8_le(mem, 1962bv64) == 2bv8); - free requires (memory_load8_le(mem, 1963bv64) == 0bv8); + free requires (memory_load32_le(mem, 1960bv64) == 131073bv32); free requires (memory_load64_le(mem, 69008bv64) == 1936bv64); free requires (memory_load64_le(mem, 69016bv64) == 1856bv64); free requires (memory_load64_le(mem, 69616bv64) == 1600bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1960bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1961bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1962bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1963bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1960bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69008bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1600bv64); diff --git a/src/test/correct/syscall/gcc_O2/syscall_gtirb.expected b/src/test/correct/syscall/gcc_O2/syscall_gtirb.expected index ef48faa8c..c8f72f876 100644 --- a/src/test/correct/syscall/gcc_O2/syscall_gtirb.expected +++ b/src/test/correct/syscall/gcc_O2/syscall_gtirb.expected @@ -6,7 +6,7 @@ var {:extern} R17: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $_IO_stdin_used_addr: bv64; axiom ($_IO_stdin_used_addr == 1960bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -15,22 +15,19 @@ function {:extern} gamma_load64(gammaMap: [bv64]bool, index: bv64) 
returns (bool (gammaMap[bvadd64(index, 7bv64)] && (gammaMap[bvadd64(index, 6bv64)] && (gammaMap[bvadd64(index, 5bv64)] && (gammaMap[bvadd64(index, 4bv64)] && (gammaMap[bvadd64(index, 3bv64)] && (gammaMap[bvadd64(index, 2bv64)] && (gammaMap[bvadd64(index, 1bv64)] && gammaMap[index]))))))) } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1960bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1961bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1962bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1963bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1960bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69008bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1600bv64); @@ -38,8 +35,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -56,18 +53,12 @@ procedure main(); modifies Gamma_R16, Gamma_R17, Gamma_mem, R16, R17, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1960bv64) == 1bv8); - free requires (memory_load8_le(mem, 1961bv64) == 0bv8); - free requires (memory_load8_le(mem, 1962bv64) == 2bv8); - free requires (memory_load8_le(mem, 1963bv64) == 0bv8); + free requires (memory_load32_le(mem, 1960bv64) == 131073bv32); free requires (memory_load64_le(mem, 69008bv64) == 1936bv64); free requires (memory_load64_le(mem, 69016bv64) == 1856bv64); free requires (memory_load64_le(mem, 69616bv64) == 1600bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1960bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1961bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1962bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1963bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1960bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69008bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1600bv64); @@ -83,18 +74,12 @@ implementation main() procedure .L_610(); modifies Gamma_R16, Gamma_R17, Gamma_mem, R16, R17, mem; - free requires 
(memory_load8_le(mem, 1960bv64) == 1bv8); - free requires (memory_load8_le(mem, 1961bv64) == 0bv8); - free requires (memory_load8_le(mem, 1962bv64) == 2bv8); - free requires (memory_load8_le(mem, 1963bv64) == 0bv8); + free requires (memory_load32_le(mem, 1960bv64) == 131073bv32); free requires (memory_load64_le(mem, 69008bv64) == 1936bv64); free requires (memory_load64_le(mem, 69016bv64) == 1856bv64); free requires (memory_load64_le(mem, 69616bv64) == 1600bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1960bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1961bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1962bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1963bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1960bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69008bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1600bv64); @@ -113,18 +98,12 @@ implementation .L_610() } procedure fork(); - free requires (memory_load8_le(mem, 1960bv64) == 1bv8); - free requires (memory_load8_le(mem, 1961bv64) == 0bv8); - free requires (memory_load8_le(mem, 1962bv64) == 2bv8); - free requires (memory_load8_le(mem, 1963bv64) == 0bv8); + free requires (memory_load32_le(mem, 1960bv64) == 131073bv32); free requires (memory_load64_le(mem, 69008bv64) == 1936bv64); free requires (memory_load64_le(mem, 69016bv64) == 1856bv64); free requires (memory_load64_le(mem, 69616bv64) == 1600bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1960bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1961bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1962bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1963bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1960bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69008bv64) == 1936bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1856bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1600bv64); diff --git a/src/test/correct/using_gamma_conditional/clang/using_gamma_conditional.expected b/src/test/correct/using_gamma_conditional/clang/using_gamma_conditional.expected index 5531435ab..f85384bef 100644 --- a/src/test/correct/using_gamma_conditional/clang/using_gamma_conditional.expected +++ b/src/test/correct/using_gamma_conditional/clang/using_gamma_conditional.expected @@ -20,8 +20,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -47,10 +47,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - 
function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -62,10 +58,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -101,20 +94,14 @@ procedure main(); requires (gamma_load32(Gamma_mem, $x_addr) == true); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/using_gamma_conditional/clang/using_gamma_conditional_gtirb.expected b/src/test/correct/using_gamma_conditional/clang/using_gamma_conditional_gtirb.expected index c1eb2bbc9..f5400493a 100644 --- a/src/test/correct/using_gamma_conditional/clang/using_gamma_conditional_gtirb.expected +++ b/src/test/correct/using_gamma_conditional/clang/using_gamma_conditional_gtirb.expected @@ -20,8 +20,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -46,10 +46,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ 
(memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -60,10 +56,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -99,20 +92,14 @@ procedure main(); requires (gamma_load32(Gamma_mem, $x_addr) == true); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/using_gamma_conditional/clang_O2/using_gamma_conditional.expected b/src/test/correct/using_gamma_conditional/clang_O2/using_gamma_conditional.expected index 3df066045..5bbfbd38f 100644 --- a/src/test/correct/using_gamma_conditional/clang_O2/using_gamma_conditional.expected +++ b/src/test/correct/using_gamma_conditional/clang_O2/using_gamma_conditional.expected @@ -16,8 +16,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -39,10 +39,6 @@ function 
{:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} {:bvbuiltin "sign_extend 1"} sign_extend1_32(bv32) returns (bv33); function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (bv33); function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); @@ -50,10 +46,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -89,18 +82,12 @@ procedure main(); requires (gamma_load32(Gamma_mem, $x_addr) == true); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1852bv64) == 1bv8); - free requires (memory_load8_le(mem, 1853bv64) == 0bv8); - free requires (memory_load8_le(mem, 1854bv64) == 2bv8); - free requires (memory_load8_le(mem, 1855bv64) == 0bv8); + free requires (memory_load32_le(mem, 1852bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/using_gamma_conditional/clang_O2/using_gamma_conditional_gtirb.expected b/src/test/correct/using_gamma_conditional/clang_O2/using_gamma_conditional_gtirb.expected index 5480a19ca..739f58556 100644 --- a/src/test/correct/using_gamma_conditional/clang_O2/using_gamma_conditional_gtirb.expected +++ b/src/test/correct/using_gamma_conditional/clang_O2/using_gamma_conditional_gtirb.expected @@ -16,8 +16,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then 
(memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -38,20 +38,13 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (bv33); function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -87,18 +80,12 @@ procedure main(); requires (gamma_load32(Gamma_mem, $x_addr) == true); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1852bv64) == 1bv8); - free requires (memory_load8_le(mem, 1853bv64) == 0bv8); - free requires (memory_load8_le(mem, 1854bv64) == 2bv8); - free requires (memory_load8_le(mem, 1855bv64) == 0bv8); + free requires (memory_load32_le(mem, 1852bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/correct/using_gamma_conditional/clang_pic/using_gamma_conditional.expected b/src/test/correct/using_gamma_conditional/clang_pic/using_gamma_conditional.expected index ca1acc493..6c064e72e 100644 --- a/src/test/correct/using_gamma_conditional/clang_pic/using_gamma_conditional.expected +++ b/src/test/correct/using_gamma_conditional/clang_pic/using_gamma_conditional.expected @@ -20,8 +20,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) 
then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -51,10 +51,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -66,15 +62,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -106,26 +99,20 @@ procedure main(); requires (gamma_load32(Gamma_mem, $x_addr) == true); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1964bv64) == 1bv8); - free requires (memory_load8_le(mem, 1965bv64) == 0bv8); - free requires (memory_load8_le(mem, 1966bv64) == 2bv8); - free requires (memory_load8_le(mem, 1967bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1964bv64) == 131073bv32); free requires (memory_load64_le(mem, 69056bv64) == 1872bv64); free requires (memory_load64_le(mem, 69064bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures 
(memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/using_gamma_conditional/clang_pic/using_gamma_conditional_gtirb.expected b/src/test/correct/using_gamma_conditional/clang_pic/using_gamma_conditional_gtirb.expected index 988281be0..bab1430f0 100644 --- a/src/test/correct/using_gamma_conditional/clang_pic/using_gamma_conditional_gtirb.expected +++ b/src/test/correct/using_gamma_conditional/clang_pic/using_gamma_conditional_gtirb.expected @@ -20,8 +20,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -50,10 +50,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -64,15 +60,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -104,26 +97,20 @@ procedure main(); requires (gamma_load32(Gamma_mem, $x_addr) == true); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1964bv64) == 1bv8); - free requires (memory_load8_le(mem, 1965bv64) == 0bv8); - free requires (memory_load8_le(mem, 1966bv64) == 2bv8); - free requires (memory_load8_le(mem, 1967bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); - free requires (memory_load64_le(mem, 
69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1964bv64) == 131073bv32); free requires (memory_load64_le(mem, 69056bv64) == 1872bv64); free requires (memory_load64_le(mem, 69064bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/correct/using_gamma_conditional/gcc/using_gamma_conditional.expected b/src/test/correct/using_gamma_conditional/gcc/using_gamma_conditional.expected index ca310ac5a..d15546bc6 100644 --- a/src/test/correct/using_gamma_conditional/gcc/using_gamma_conditional.expected +++ b/src/test/correct/using_gamma_conditional/gcc/using_gamma_conditional.expected @@ -14,8 +14,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -37,10 +37,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} {:bvbuiltin "sign_extend 1"} sign_extend1_32(bv32) returns (bv33); function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (bv33); function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); @@ -48,10 +44,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 
1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -87,18 +80,12 @@ procedure main(); requires (gamma_load32(Gamma_mem, $x_addr) == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1868bv64) == 1bv8); - free requires (memory_load8_le(mem, 1869bv64) == 0bv8); - free requires (memory_load8_le(mem, 1870bv64) == 2bv8); - free requires (memory_load8_le(mem, 1871bv64) == 0bv8); + free requires (memory_load32_le(mem, 1868bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/using_gamma_conditional/gcc/using_gamma_conditional_gtirb.expected b/src/test/correct/using_gamma_conditional/gcc/using_gamma_conditional_gtirb.expected index 65aa5243e..092d40222 100644 --- a/src/test/correct/using_gamma_conditional/gcc/using_gamma_conditional_gtirb.expected +++ b/src/test/correct/using_gamma_conditional/gcc/using_gamma_conditional_gtirb.expected @@ -14,8 +14,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -36,20 +36,13 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (bv33); function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free 
ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -85,18 +78,12 @@ procedure main(); requires (gamma_load32(Gamma_mem, $x_addr) == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1868bv64) == 1bv8); - free requires (memory_load8_le(mem, 1869bv64) == 0bv8); - free requires (memory_load8_le(mem, 1870bv64) == 2bv8); - free requires (memory_load8_le(mem, 1871bv64) == 0bv8); + free requires (memory_load32_le(mem, 1868bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1868bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1869bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1870bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1871bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1868bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/correct/using_gamma_conditional/gcc_pic/using_gamma_conditional.expected b/src/test/correct/using_gamma_conditional/gcc_pic/using_gamma_conditional.expected index c8746d547..e36ae841a 100644 --- a/src/test/correct/using_gamma_conditional/gcc_pic/using_gamma_conditional.expected +++ b/src/test/correct/using_gamma_conditional/gcc_pic/using_gamma_conditional.expected @@ -14,8 +14,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, bv32) returns (bv32); @@ -41,10 +41,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} {:bvbuiltin "sign_extend 1"} sign_extend1_32(bv32) returns (bv33); function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (bv33); function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); @@ -52,15 +48,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 
1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -92,24 +85,18 @@ procedure main(); requires (gamma_load32(Gamma_mem, $x_addr) == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1932bv64) == 1bv8); - free requires (memory_load8_le(mem, 1933bv64) == 0bv8); - free requires (memory_load8_le(mem, 1934bv64) == 2bv8); - free requires (memory_load8_le(mem, 1935bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1932bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/using_gamma_conditional/gcc_pic/using_gamma_conditional_gtirb.expected b/src/test/correct/using_gamma_conditional/gcc_pic/using_gamma_conditional_gtirb.expected index 3fc47285f..b1b14067c 100644 --- a/src/test/correct/using_gamma_conditional/gcc_pic/using_gamma_conditional_gtirb.expected +++ b/src/test/correct/using_gamma_conditional/gcc_pic/using_gamma_conditional_gtirb.expected @@ -14,8 +14,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd32(bv32, 
bv32) returns (bv32); @@ -40,25 +40,18 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (bv33); function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (old(gamma_load32(Gamma_mem, $x_addr)) ==> gamma_load32(Gamma_mem, $x_addr)); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -90,24 +83,18 @@ procedure main(); requires (gamma_load32(Gamma_mem, $x_addr) == true); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1932bv64) == 1bv8); - free requires (memory_load8_le(mem, 1933bv64) == 0bv8); - free requires (memory_load8_le(mem, 1934bv64) == 2bv8); - free requires (memory_load8_le(mem, 1935bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1932bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); - free ensures (memory_load8_le(mem, 1932bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1933bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1934bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1935bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); + free ensures (memory_load32_le(mem, 1932bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 
69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/correct/using_gamma_write_z/clang/using_gamma_write_z.expected b/src/test/correct/using_gamma_write_z/clang/using_gamma_write_z.expected index 880ccabaf..9d59e8152 100644 --- a/src/test/correct/using_gamma_write_z/clang/using_gamma_write_z.expected +++ b/src/test/correct/using_gamma_write_z/clang/using_gamma_write_z.expected @@ -10,8 +10,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,10 +39,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == memory_load32_le(mem, $x_addr)) && (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr))); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -82,18 +75,12 @@ procedure main(); requires ((memory_load32_le(mem, $z_addr) == 0bv32) ==> gamma_load32(Gamma_mem, $x_addr)); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1852bv64) == 1bv8); - free requires (memory_load8_le(mem, 1853bv64) == 0bv8); - free requires (memory_load8_le(mem, 1854bv64) == 2bv8); - free requires (memory_load8_le(mem, 1855bv64) == 0bv8); + free requires (memory_load32_le(mem, 1852bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); 
+ free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -111,7 +98,6 @@ implementation main() Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, bvadd64(R9, 52bv64), R8[32:0]), gamma_store32(Gamma_mem, bvadd64(R9, 52bv64), Gamma_R8); assert ((bvadd64(R9, 52bv64) == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); - assert (Gamma_x_old ==> gamma_load32(Gamma_mem, $x_addr)); assume {:captureState "%000002ce"} true; R0, Gamma_R0 := 0bv64, true; goto main_basil_return; diff --git a/src/test/correct/using_gamma_write_z/clang/using_gamma_write_z_gtirb.expected b/src/test/correct/using_gamma_write_z/clang/using_gamma_write_z_gtirb.expected index 91ed7e5d7..740065fd5 100644 --- a/src/test/correct/using_gamma_write_z/clang/using_gamma_write_z_gtirb.expected +++ b/src/test/correct/using_gamma_write_z/clang/using_gamma_write_z_gtirb.expected @@ -10,8 +10,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,10 +39,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == memory_load32_le(mem, $x_addr)) && (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr))); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -82,18 +75,12 @@ procedure main(); requires ((memory_load32_le(mem, $z_addr) == 0bv32) ==> gamma_load32(Gamma_mem, $x_addr)); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1852bv64) == 1bv8); - free requires 
(memory_load8_le(mem, 1853bv64) == 0bv8); - free requires (memory_load8_le(mem, 1854bv64) == 2bv8); - free requires (memory_load8_le(mem, 1855bv64) == 0bv8); + free requires (memory_load32_le(mem, 1852bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1852bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1853bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1854bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1855bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1852bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -111,7 +98,6 @@ implementation main() Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, bvadd64(R9, 52bv64), R8[32:0]), gamma_store32(Gamma_mem, bvadd64(R9, 52bv64), Gamma_R8); assert ((bvadd64(R9, 52bv64) == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); - assert (Gamma_x_old ==> gamma_load32(Gamma_mem, $x_addr)); assume {:captureState "1820$0"} true; R0, Gamma_R0 := 0bv64, true; goto main_basil_return; diff --git a/src/test/correct/using_gamma_write_z/clang_pic/using_gamma_write_z.expected b/src/test/correct/using_gamma_write_z/clang_pic/using_gamma_write_z.expected index 8787d7ffc..8525eea2b 100644 --- a/src/test/correct/using_gamma_write_z/clang_pic/using_gamma_write_z.expected +++ b/src/test/correct/using_gamma_write_z/clang_pic/using_gamma_write_z.expected @@ -10,8 +10,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -35,10 +35,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -47,15 +43,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == memory_load32_le(mem, $x_addr)) && (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr))); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 
0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -87,24 +80,18 @@ procedure main(); requires ((memory_load32_le(mem, $z_addr) == 0bv32) ==> gamma_load32(Gamma_mem, $x_addr)); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); free requires (memory_load64_le(mem, 69056bv64) == 1872bv64); free requires (memory_load64_le(mem, 69064bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { @@ -120,7 +107,6 @@ implementation main() Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R9, R8[32:0]), gamma_store32(Gamma_mem, R9, Gamma_R8); assert ((R9 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); - assert (Gamma_x_old ==> gamma_load32(Gamma_mem, $x_addr)); assume {:captureState "%000002d9"} true; R0, Gamma_R0 := 0bv64, true; goto main_basil_return; diff --git a/src/test/correct/using_gamma_write_z/clang_pic/using_gamma_write_z_gtirb.expected b/src/test/correct/using_gamma_write_z/clang_pic/using_gamma_write_z_gtirb.expected index 797dd348e..8062aae34 100644 --- a/src/test/correct/using_gamma_write_z/clang_pic/using_gamma_write_z_gtirb.expected +++ b/src/test/correct/using_gamma_write_z/clang_pic/using_gamma_write_z_gtirb.expected @@ -10,8 +10,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69684bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then 
(memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -35,10 +35,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -47,15 +43,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == memory_load32_le(mem, $x_addr)) && (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr))); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -87,24 +80,18 @@ procedure main(); requires ((memory_load32_le(mem, $z_addr) == 0bv32) ==> gamma_load32(Gamma_mem, $x_addr)); free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); free requires (memory_load64_le(mem, 69056bv64) == 1872bv64); free requires (memory_load64_le(mem, 69064bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures 
(memory_load32_le(mem, 1920bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69056bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69064bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { @@ -120,7 +107,6 @@ implementation main() Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R9, R8[32:0]), gamma_store32(Gamma_mem, R9, Gamma_R8); assert ((R9 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); - assert (Gamma_x_old ==> gamma_load32(Gamma_mem, $x_addr)); assume {:captureState "1888$0"} true; R0, Gamma_R0 := 0bv64, true; goto main_basil_return; diff --git a/src/test/correct/using_gamma_write_z/gcc/using_gamma_write_z.expected b/src/test/correct/using_gamma_write_z/gcc/using_gamma_write_z.expected index 7ce4c193c..2fe980180 100644 --- a/src/test/correct/using_gamma_write_z/gcc/using_gamma_write_z.expected +++ b/src/test/correct/using_gamma_write_z/gcc/using_gamma_write_z.expected @@ -8,8 +8,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -41,10 +37,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == memory_load32_le(mem, $x_addr)) && (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr))); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -80,18 +73,12 @@ procedure main(); requires ((memory_load32_le(mem, $z_addr) == 0bv32) ==> gamma_load32(Gamma_mem, $x_addr)); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires 
(memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -110,7 +97,6 @@ implementation main() Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R0, R1[32:0]), gamma_store32(Gamma_mem, R0, Gamma_R1); assert ((R0 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); - assert (Gamma_x_old ==> gamma_load32(Gamma_mem, $x_addr)); assume {:captureState "%000002d8"} true; R0, Gamma_R0 := 0bv64, true; goto main_basil_return; diff --git a/src/test/correct/using_gamma_write_z/gcc/using_gamma_write_z_gtirb.expected b/src/test/correct/using_gamma_write_z/gcc/using_gamma_write_z_gtirb.expected index e3fef2e26..da53252e5 100644 --- a/src/test/correct/using_gamma_write_z/gcc/using_gamma_write_z_gtirb.expected +++ b/src/test/correct/using_gamma_write_z/gcc/using_gamma_write_z_gtirb.expected @@ -8,8 +8,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -41,10 +37,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == memory_load32_le(mem, $x_addr)) && (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr))); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) 
== 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -80,18 +73,12 @@ procedure main(); requires ((memory_load32_le(mem, $z_addr) == 0bv32) ==> gamma_load32(Gamma_mem, $x_addr)); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -110,7 +97,6 @@ implementation main() Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R0, R1[32:0]), gamma_store32(Gamma_mem, R0, Gamma_R1); assert ((R0 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); - assert (Gamma_x_old ==> gamma_load32(Gamma_mem, $x_addr)); assume {:captureState "1824$0"} true; R0, Gamma_R0 := 0bv64, true; goto main_basil_return; diff --git a/src/test/correct/using_gamma_write_z/gcc_O2/using_gamma_write_z.expected b/src/test/correct/using_gamma_write_z/gcc_O2/using_gamma_write_z.expected index a5be3e625..d253f2a2e 100644 --- a/src/test/correct/using_gamma_write_z/gcc_O2/using_gamma_write_z.expected +++ b/src/test/correct/using_gamma_write_z/gcc_O2/using_gamma_write_z.expected @@ -10,8 +10,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69656bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, 
index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,10 +39,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == memory_load32_le(mem, $x_addr)) && (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -82,18 +75,12 @@ procedure main(); requires ((memory_load32_le(mem, $z_addr) == 0bv32) ==> gamma_load32(Gamma_mem, $x_addr)); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -112,7 +99,6 @@ implementation main() Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, bvadd64(R1, 20bv64), R2[32:0]), gamma_store32(Gamma_mem, bvadd64(R1, 20bv64), Gamma_R2); assert ((bvadd64(R1, 20bv64) == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); - assert (Gamma_x_old ==> gamma_load32(Gamma_mem, $x_addr)); assume {:captureState "%000001bd"} true; goto main_basil_return; main_basil_return: diff --git a/src/test/correct/using_gamma_write_z/gcc_O2/using_gamma_write_z_gtirb.expected b/src/test/correct/using_gamma_write_z/gcc_O2/using_gamma_write_z_gtirb.expected index 6ae6f75eb..0f13da53f 100644 --- a/src/test/correct/using_gamma_write_z/gcc_O2/using_gamma_write_z_gtirb.expected +++ b/src/test/correct/using_gamma_write_z/gcc_O2/using_gamma_write_z_gtirb.expected @@ -10,8 +10,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69656bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true 
else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -43,10 +39,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == memory_load32_le(mem, $x_addr)) && (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -82,18 +75,12 @@ procedure main(); requires ((memory_load32_le(mem, $z_addr) == 0bv32) ==> gamma_load32(Gamma_mem, $x_addr)); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -112,7 +99,6 @@ implementation main() Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, bvadd64(R1, 20bv64), R2[32:0]), gamma_store32(Gamma_mem, bvadd64(R1, 20bv64), Gamma_R2); assert ((bvadd64(R1, 20bv64) == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); - assert (Gamma_x_old ==> gamma_load32(Gamma_mem, $x_addr)); assume {:captureState "1548$0"} true; goto main_basil_return; main_basil_return: diff --git a/src/test/correct/using_gamma_write_z/gcc_pic/using_gamma_write_z.expected 
b/src/test/correct/using_gamma_write_z/gcc_pic/using_gamma_write_z.expected index addc3d272..23cde8fa0 100644 --- a/src/test/correct/using_gamma_write_z/gcc_pic/using_gamma_write_z.expected +++ b/src/test/correct/using_gamma_write_z/gcc_pic/using_gamma_write_z.expected @@ -8,8 +8,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -33,10 +33,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -45,15 +41,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == memory_load32_le(mem, $x_addr)) && (old(memory_load32_le(mem, $z_addr)) == memory_load32_le(mem, $z_addr))); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -85,24 +78,18 @@ procedure main(); requires ((memory_load32_le(mem, $z_addr) == 0bv32) ==> gamma_load32(Gamma_mem, $x_addr)); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); free requires 
(memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69600bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { @@ -118,7 +105,6 @@ implementation main() Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R0, R1[32:0]), gamma_store32(Gamma_mem, R0, Gamma_R1); assert ((R0 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); - assert (Gamma_x_old ==> gamma_load32(Gamma_mem, $x_addr)); assume {:captureState "%000002d9"} true; R0, Gamma_R0 := 0bv64, true; goto main_basil_return; diff --git a/src/test/correct/using_gamma_write_z/gcc_pic/using_gamma_write_z_gtirb.expected b/src/test/correct/using_gamma_write_z/gcc_pic/using_gamma_write_z_gtirb.expected index fda018aa5..e70802918 100644 --- a/src/test/correct/using_gamma_write_z/gcc_pic/using_gamma_write_z_gtirb.expected +++ b/src/test/correct/using_gamma_write_z/gcc_pic/using_gamma_write_z_gtirb.expected @@ -8,8 +8,8 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { - (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(memory, $z_addr) == 0bv32) else false)) +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { + (if (index == $z_addr) then true else (if (index == $x_addr) then (memory_load32_le(mem$in, $z_addr) == 0bv32) else false)) } function {:extern} {:bvbuiltin "bvadd"} bvadd64(bv64, bv64) returns (bv64); @@ -33,10 +33,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -45,15 +41,12 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures ((old(memory_load32_le(mem, $x_addr)) == memory_load32_le(mem, $x_addr)) && (old(memory_load32_le(mem, $z_addr)) == 
memory_load32_le(mem, $z_addr))); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -85,24 +78,18 @@ procedure main(); requires ((memory_load32_le(mem, $z_addr) == 0bv32) ==> gamma_load32(Gamma_mem, $x_addr)); free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1920bv64) == 1bv8); - free requires (memory_load8_le(mem, 1921bv64) == 0bv8); - free requires (memory_load8_le(mem, 1922bv64) == 2bv8); - free requires (memory_load8_le(mem, 1923bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1920bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); free requires (memory_load64_le(mem, 69016bv64) == 1792bv64); free requires (memory_load64_le(mem, 69600bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69008bv64) == 1872bv64); - free ensures (memory_load8_le(mem, 1920bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1921bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1922bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1923bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); + free ensures (memory_load32_le(mem, 1920bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69016bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69600bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69008bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { @@ -118,7 +105,6 @@ implementation main() Gamma_x_old := (gamma_load32(Gamma_mem, $x_addr) || L(mem, $x_addr)); mem, Gamma_mem := memory_store32_le(mem, R0, R1[32:0]), gamma_store32(Gamma_mem, R0, Gamma_R1); assert ((R0 == $z_addr) ==> (L(mem, $x_addr) ==> Gamma_x_old)); - assert (Gamma_x_old ==> gamma_load32(Gamma_mem, $x_addr)); assume {:captureState "1888$0"} true; R0, Gamma_R0 := 0bv64, true; goto main_basil_return; diff --git a/src/test/incorrect/basicassign/clang/basicassign.expected b/src/test/incorrect/basicassign/clang/basicassign.expected index 60367a8c0..02b8d1f0e 100644 --- a/src/test/incorrect/basicassign/clang/basicassign.expected +++ b/src/test/incorrect/basicassign/clang/basicassign.expected @@ -16,7 +16,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 
69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69692bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else (if (index == $x_addr) then false else false))) } @@ -37,10 +37,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -50,10 +46,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 1888bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1889bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1890bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1891bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1888bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -83,18 +76,12 @@ procedure main(); modifies Gamma_R0, Gamma_R10, Gamma_R11, Gamma_R8, Gamma_R9, Gamma_mem, R0, R10, R11, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1888bv64) == 1bv8); - free requires (memory_load8_le(mem, 1889bv64) == 0bv8); - free requires (memory_load8_le(mem, 1890bv64) == 2bv8); - free requires (memory_load8_le(mem, 1891bv64) == 0bv8); + free requires (memory_load32_le(mem, 1888bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1888bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1889bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1890bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1891bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1888bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign/clang/basicassign_gtirb.expected b/src/test/incorrect/basicassign/clang/basicassign_gtirb.expected index 7338ab0ff..563cb3087 100644 --- a/src/test/incorrect/basicassign/clang/basicassign_gtirb.expected +++ b/src/test/incorrect/basicassign/clang/basicassign_gtirb.expected @@ -16,7 +16,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69692bv64); -function {:extern} L(memory: 
[bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else (if (index == $x_addr) then false else false))) } @@ -37,10 +37,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -50,10 +46,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 1888bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1889bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1890bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1891bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1888bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -83,18 +76,12 @@ procedure main(); modifies Gamma_R0, Gamma_R10, Gamma_R11, Gamma_R8, Gamma_R9, Gamma_mem, R0, R10, R11, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1888bv64) == 1bv8); - free requires (memory_load8_le(mem, 1889bv64) == 0bv8); - free requires (memory_load8_le(mem, 1890bv64) == 2bv8); - free requires (memory_load8_le(mem, 1891bv64) == 0bv8); + free requires (memory_load32_le(mem, 1888bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1888bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1889bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1890bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1891bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1888bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign/clang_O2/basicassign.expected b/src/test/incorrect/basicassign/clang_O2/basicassign.expected index 3de01bcab..8b0e755c9 100644 --- a/src/test/incorrect/basicassign/clang_O2/basicassign.expected +++ b/src/test/incorrect/basicassign/clang_O2/basicassign.expected @@ -14,7 +14,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69692bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { 
(if (index == $z_addr) then true else (if (index == $secret_addr) then false else (if (index == $x_addr) then false else false))) } @@ -35,10 +35,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -48,10 +44,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 1864bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1865bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1866bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1867bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1864bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -81,18 +74,12 @@ procedure main(); modifies Gamma_R0, Gamma_R10, Gamma_R8, Gamma_R9, Gamma_mem, R0, R10, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1864bv64) == 1bv8); - free requires (memory_load8_le(mem, 1865bv64) == 0bv8); - free requires (memory_load8_le(mem, 1866bv64) == 2bv8); - free requires (memory_load8_le(mem, 1867bv64) == 0bv8); + free requires (memory_load32_le(mem, 1864bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1864bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1865bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1866bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1867bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1864bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign/clang_O2/basicassign_gtirb.expected b/src/test/incorrect/basicassign/clang_O2/basicassign_gtirb.expected index ecdc0fe3d..abae7f906 100644 --- a/src/test/incorrect/basicassign/clang_O2/basicassign_gtirb.expected +++ b/src/test/incorrect/basicassign/clang_O2/basicassign_gtirb.expected @@ -14,7 +14,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69692bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else (if (index == $x_addr) 
then false else false))) } @@ -35,10 +35,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -48,10 +44,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 1864bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1865bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1866bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1867bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1864bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -81,18 +74,12 @@ procedure main(); modifies Gamma_R0, Gamma_R10, Gamma_R8, Gamma_R9, Gamma_mem, R0, R10, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1864bv64) == 1bv8); - free requires (memory_load8_le(mem, 1865bv64) == 0bv8); - free requires (memory_load8_le(mem, 1866bv64) == 2bv8); - free requires (memory_load8_le(mem, 1867bv64) == 0bv8); + free requires (memory_load32_le(mem, 1864bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1864bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1865bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1866bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1867bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1864bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign/clang_pic/basicassign.expected b/src/test/incorrect/basicassign/clang_pic/basicassign.expected index 2482ec834..df8da4c63 100644 --- a/src/test/incorrect/basicassign/clang_pic/basicassign.expected +++ b/src/test/incorrect/basicassign/clang_pic/basicassign.expected @@ -16,7 +16,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69692bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else (if (index == $x_addr) then false else false))) } @@ -41,10 +41,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 
(memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -54,17 +50,14 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69048bv64) == 1792bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69040bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69048bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69560bv64) == 69692bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69560bv64) == 69692bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -90,28 +83,22 @@ procedure main(); modifies Gamma_R0, Gamma_R10, Gamma_R11, Gamma_R8, Gamma_R9, Gamma_mem, R0, R10, R11, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1964bv64) == 1bv8); - free requires (memory_load8_le(mem, 1965bv64) == 0bv8); - free requires (memory_load8_le(mem, 1966bv64) == 2bv8); - free requires (memory_load8_le(mem, 1967bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69048bv64) == 1792bv64); + free requires (memory_load32_le(mem, 1964bv64) == 131073bv32); free requires (memory_load64_le(mem, 69040bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69048bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69560bv64) == 69692bv64); free requires (memory_load64_le(mem, 69568bv64) == 69688bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69560bv64) == 69692bv64); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69048bv64) == 1792bv64); + free requires 
(memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69040bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69048bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69560bv64) == 69692bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69560bv64) == 69692bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/incorrect/basicassign/clang_pic/basicassign_gtirb.expected b/src/test/incorrect/basicassign/clang_pic/basicassign_gtirb.expected index 5584a3882..60c48f7f5 100644 --- a/src/test/incorrect/basicassign/clang_pic/basicassign_gtirb.expected +++ b/src/test/incorrect/basicassign/clang_pic/basicassign_gtirb.expected @@ -16,7 +16,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69692bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else (if (index == $x_addr) then false else false))) } @@ -41,10 +41,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -54,17 +50,14 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69048bv64) == 1792bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69040bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69048bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69560bv64) == 69692bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69560bv64) == 69692bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -90,28 +83,22 @@ procedure main(); modifies Gamma_R0, Gamma_R10, Gamma_R11, Gamma_R8, Gamma_R9, Gamma_mem, R0, R10, R11, R8, R9, mem; free requires 
(memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1964bv64) == 1bv8); - free requires (memory_load8_le(mem, 1965bv64) == 0bv8); - free requires (memory_load8_le(mem, 1966bv64) == 2bv8); - free requires (memory_load8_le(mem, 1967bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69048bv64) == 1792bv64); + free requires (memory_load32_le(mem, 1964bv64) == 131073bv32); free requires (memory_load64_le(mem, 69040bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69048bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69560bv64) == 69692bv64); free requires (memory_load64_le(mem, 69568bv64) == 69688bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69560bv64) == 69692bv64); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69048bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69040bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69048bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69560bv64) == 69692bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69560bv64) == 69692bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/incorrect/basicassign/gcc/basicassign.expected b/src/test/incorrect/basicassign/gcc/basicassign.expected index 404564f37..94068d3c2 100644 --- a/src/test/incorrect/basicassign/gcc/basicassign.expected +++ b/src/test/incorrect/basicassign/gcc/basicassign.expected @@ -10,7 +10,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else (if (index == $x_addr) then false else false))) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -44,10 +40,7 @@ 
procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 1948bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1949bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1950bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1951bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1948bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -77,18 +70,12 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1948bv64) == 1bv8); - free requires (memory_load8_le(mem, 1949bv64) == 0bv8); - free requires (memory_load8_le(mem, 1950bv64) == 2bv8); - free requires (memory_load8_le(mem, 1951bv64) == 0bv8); + free requires (memory_load32_le(mem, 1948bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1948bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1949bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1950bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1951bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1948bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign/gcc/basicassign_gtirb.expected b/src/test/incorrect/basicassign/gcc/basicassign_gtirb.expected index 095bf3adc..19dc9e4c2 100644 --- a/src/test/incorrect/basicassign/gcc/basicassign_gtirb.expected +++ b/src/test/incorrect/basicassign/gcc/basicassign_gtirb.expected @@ -10,7 +10,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else (if (index == $x_addr) then false else false))) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -44,10 +40,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); 
ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 1948bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1949bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1950bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1951bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1948bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -77,18 +70,12 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1948bv64) == 1bv8); - free requires (memory_load8_le(mem, 1949bv64) == 0bv8); - free requires (memory_load8_le(mem, 1950bv64) == 2bv8); - free requires (memory_load8_le(mem, 1951bv64) == 0bv8); + free requires (memory_load32_le(mem, 1948bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1948bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1949bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1950bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1951bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1948bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign/gcc_O2/basicassign.expected b/src/test/incorrect/basicassign/gcc_O2/basicassign.expected index 49c578a07..d330626d5 100644 --- a/src/test/incorrect/basicassign/gcc_O2/basicassign.expected +++ b/src/test/incorrect/basicassign/gcc_O2/basicassign.expected @@ -12,7 +12,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69656bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69660bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else (if (index == $x_addr) then false else false))) } @@ -33,10 +33,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -46,10 +42,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures 
(memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -79,18 +72,12 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R2, Gamma_mem, R0, R1, R2, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/incorrect/basicassign/gcc_O2/basicassign_gtirb.expected b/src/test/incorrect/basicassign/gcc_O2/basicassign_gtirb.expected index 9669ae7ca..48dec0790 100644 --- a/src/test/incorrect/basicassign/gcc_O2/basicassign_gtirb.expected +++ b/src/test/incorrect/basicassign/gcc_O2/basicassign_gtirb.expected @@ -12,7 +12,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69656bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69660bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else (if (index == $x_addr) then false else false))) } @@ -33,10 +33,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -46,10 +42,7 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures 
(memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -79,18 +72,12 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R2, Gamma_mem, R0, R1, R2, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/incorrect/basicassign/gcc_pic/basicassign.expected b/src/test/incorrect/basicassign/gcc_pic/basicassign.expected index f87155f7a..e58a0dbe9 100644 --- a/src/test/incorrect/basicassign/gcc_pic/basicassign.expected +++ b/src/test/incorrect/basicassign/gcc_pic/basicassign.expected @@ -10,7 +10,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else (if (index == $x_addr) then false else false))) } @@ -35,10 +35,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -48,17 +44,14 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 2012bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2013bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2014bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2015bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69660bv64); - free ensures (memory_load64_le(mem, 
69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 2012bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 68992bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69000bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69584bv64) == 69656bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1792bv64); - free ensures (memory_load64_le(mem, 68992bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69660bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -84,28 +77,22 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 2012bv64) == 1bv8); - free requires (memory_load8_le(mem, 2013bv64) == 0bv8); - free requires (memory_load8_le(mem, 2014bv64) == 2bv8); - free requires (memory_load8_le(mem, 2015bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69660bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 2012bv64) == 131073bv32); + free requires (memory_load64_le(mem, 68992bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69000bv64) == 1792bv64); free requires (memory_load64_le(mem, 69584bv64) == 69656bv64); free requires (memory_load64_le(mem, 69592bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1792bv64); - free requires (memory_load64_le(mem, 68992bv64) == 1872bv64); - free ensures (memory_load8_le(mem, 2012bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2013bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2014bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2015bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69660bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69660bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); + free ensures (memory_load32_le(mem, 2012bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 68992bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69000bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69584bv64) == 69656bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1792bv64); - free ensures (memory_load64_le(mem, 68992bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69660bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/incorrect/basicassign/gcc_pic/basicassign_gtirb.expected b/src/test/incorrect/basicassign/gcc_pic/basicassign_gtirb.expected index 5d4ca47a0..79aaba0e7 100644 --- a/src/test/incorrect/basicassign/gcc_pic/basicassign_gtirb.expected +++ b/src/test/incorrect/basicassign/gcc_pic/basicassign_gtirb.expected @@ -10,7 +10,7 @@ const {:extern} $x_addr: bv64; axiom ($x_addr == 69652bv64); const 
{:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else (if (index == $x_addr) then false else false))) } @@ -35,10 +35,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -48,17 +44,14 @@ procedure {:extern} rely(); modifies Gamma_mem, mem; ensures (forall i: bv64 :: (((mem[i] == old(mem[i])) ==> (Gamma_mem[i] == old(Gamma_mem[i]))))); ensures (memory_load32_le(mem, $x_addr) == old(memory_load32_le(mem, $x_addr))); - free ensures (memory_load8_le(mem, 2012bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2013bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2014bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2015bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69660bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 2012bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 68992bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69000bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69584bv64) == 69656bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1792bv64); - free ensures (memory_load64_le(mem, 68992bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69660bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; @@ -84,28 +77,22 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_mem, R0, R1, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 2012bv64) == 1bv8); - free requires (memory_load8_le(mem, 2013bv64) == 0bv8); - free requires (memory_load8_le(mem, 2014bv64) == 2bv8); - free requires (memory_load8_le(mem, 2015bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69660bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 2012bv64) == 131073bv32); + free requires (memory_load64_le(mem, 68992bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69000bv64) == 1792bv64); free requires (memory_load64_le(mem, 69584bv64) == 69656bv64); free requires (memory_load64_le(mem, 69592bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1792bv64); - free requires (memory_load64_le(mem, 68992bv64) == 1872bv64); - free ensures (memory_load8_le(mem, 2012bv64) == 1bv8); - 
free ensures (memory_load8_le(mem, 2013bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2014bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2015bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69660bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69660bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); + free ensures (memory_load32_le(mem, 2012bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 68992bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69000bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69584bv64) == 69656bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1792bv64); - free ensures (memory_load64_le(mem, 68992bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69660bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/incorrect/basicassign1/clang/basicassign1.expected b/src/test/incorrect/basicassign1/clang/basicassign1.expected index afb87ecdb..248887914 100644 --- a/src/test/incorrect/basicassign1/clang/basicassign1.expected +++ b/src/test/incorrect/basicassign1/clang/basicassign1.expected @@ -14,7 +14,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else false) } @@ -35,10 +35,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -46,12 +42,9 @@ function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -59,8 +52,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + 
ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -77,20 +70,14 @@ procedure main(); modifies Gamma_R0, Gamma_R10, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_mem, Gamma_stack, R0, R10, R31, R8, R9, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1892bv64) == 1bv8); - free requires (memory_load8_le(mem, 1893bv64) == 0bv8); - free requires (memory_load8_le(mem, 1894bv64) == 2bv8); - free requires (memory_load8_le(mem, 1895bv64) == 0bv8); + free requires (memory_load32_le(mem, 1892bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign1/clang/basicassign1_gtirb.expected b/src/test/incorrect/basicassign1/clang/basicassign1_gtirb.expected index 27c5c8876..faf8668f2 100644 --- a/src/test/incorrect/basicassign1/clang/basicassign1_gtirb.expected +++ b/src/test/incorrect/basicassign1/clang/basicassign1_gtirb.expected @@ -14,7 +14,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else false) } @@ -35,10 +35,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -46,12 +42,9 @@ function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 
69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -59,8 +52,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -77,20 +70,14 @@ procedure main(); modifies Gamma_R0, Gamma_R10, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_mem, Gamma_stack, R0, R10, R31, R8, R9, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1892bv64) == 1bv8); - free requires (memory_load8_le(mem, 1893bv64) == 0bv8); - free requires (memory_load8_le(mem, 1894bv64) == 2bv8); - free requires (memory_load8_le(mem, 1895bv64) == 0bv8); + free requires (memory_load32_le(mem, 1892bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign1/clang_O2/basicassign1.expected b/src/test/incorrect/basicassign1/clang_O2/basicassign1.expected index af9853960..646c9e583 100644 --- a/src/test/incorrect/basicassign1/clang_O2/basicassign1.expected +++ b/src/test/incorrect/basicassign1/clang_O2/basicassign1.expected @@ -8,7 +8,7 @@ var {:extern} R9: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else false) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -40,12 +36,9 @@ function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free 
ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -53,8 +46,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -71,18 +64,12 @@ procedure main(); modifies Gamma_R0, Gamma_R8, Gamma_R9, Gamma_mem, R0, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign1/clang_O2/basicassign1_gtirb.expected b/src/test/incorrect/basicassign1/clang_O2/basicassign1_gtirb.expected index d74bb0aff..82b0afd4c 100644 --- a/src/test/incorrect/basicassign1/clang_O2/basicassign1_gtirb.expected +++ b/src/test/incorrect/basicassign1/clang_O2/basicassign1_gtirb.expected @@ -8,7 +8,7 @@ var {:extern} R9: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else false) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -40,12 +36,9 @@ function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1856bv64) 
== 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -53,8 +46,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -71,18 +64,12 @@ procedure main(); modifies Gamma_R0, Gamma_R8, Gamma_R9, Gamma_mem, R0, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign1/clang_pic/basicassign1.expected b/src/test/incorrect/basicassign1/clang_pic/basicassign1.expected index 0158c8e2d..4764ea3d6 100644 --- a/src/test/incorrect/basicassign1/clang_pic/basicassign1.expected +++ b/src/test/incorrect/basicassign1/clang_pic/basicassign1.expected @@ -14,7 +14,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else false) } @@ -39,10 +39,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -50,23 +46,20 @@ function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies 
Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -83,28 +76,22 @@ procedure main(); modifies Gamma_R0, Gamma_R10, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_mem, Gamma_stack, R0, R10, R31, R8, R9, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1964bv64) == 1bv8); - free requires (memory_load8_le(mem, 1965bv64) == 0bv8); - free requires (memory_load8_le(mem, 1966bv64) == 2bv8); - free requires (memory_load8_le(mem, 1967bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); + free requires (memory_load32_le(mem, 1964bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69688bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/incorrect/basicassign1/clang_pic/basicassign1_gtirb.expected b/src/test/incorrect/basicassign1/clang_pic/basicassign1_gtirb.expected index 4aa8567fc..2afdf2baf 100644 --- a/src/test/incorrect/basicassign1/clang_pic/basicassign1_gtirb.expected +++ 
b/src/test/incorrect/basicassign1/clang_pic/basicassign1_gtirb.expected @@ -14,7 +14,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else false) } @@ -39,10 +39,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -50,23 +46,20 @@ function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -83,28 +76,22 @@ procedure main(); modifies Gamma_R0, Gamma_R10, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_mem, Gamma_stack, R0, R10, R31, R8, R9, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1964bv64) == 1bv8); - free requires (memory_load8_le(mem, 1965bv64) == 0bv8); - free requires (memory_load8_le(mem, 1966bv64) == 2bv8); - free requires (memory_load8_le(mem, 1967bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); + free requires (memory_load32_le(mem, 1964bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69688bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free 
requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/incorrect/basicassign1/gcc/basicassign1.expected b/src/test/incorrect/basicassign1/gcc/basicassign1.expected index 192c82d15..d9baf8698 100644 --- a/src/test/incorrect/basicassign1/gcc/basicassign1.expected +++ b/src/test/incorrect/basicassign1/gcc/basicassign1.expected @@ -10,7 +10,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $z_addr: bv64; axiom ($z_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else false) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -42,12 +38,9 @@ function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -55,8 +48,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -73,20 +66,14 @@ procedure main(); modifies Gamma_R0, Gamma_R1, 
Gamma_R31, Gamma_mem, Gamma_stack, R0, R1, R31, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1916bv64) == 1bv8); - free requires (memory_load8_le(mem, 1917bv64) == 0bv8); - free requires (memory_load8_le(mem, 1918bv64) == 2bv8); - free requires (memory_load8_le(mem, 1919bv64) == 0bv8); + free requires (memory_load32_le(mem, 1916bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign1/gcc/basicassign1_gtirb.expected b/src/test/incorrect/basicassign1/gcc/basicassign1_gtirb.expected index 92d417367..3e998dfcb 100644 --- a/src/test/incorrect/basicassign1/gcc/basicassign1_gtirb.expected +++ b/src/test/incorrect/basicassign1/gcc/basicassign1_gtirb.expected @@ -10,7 +10,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $z_addr: bv64; axiom ($z_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else false) } @@ -31,10 +31,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -42,12 +38,9 @@ function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -55,8 +48,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies 
Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -73,20 +66,14 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R31, Gamma_mem, Gamma_stack, R0, R1, R31, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1916bv64) == 1bv8); - free requires (memory_load8_le(mem, 1917bv64) == 0bv8); - free requires (memory_load8_le(mem, 1918bv64) == 2bv8); - free requires (memory_load8_le(mem, 1919bv64) == 0bv8); + free requires (memory_load32_le(mem, 1916bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign1/gcc_O2/basicassign1.expected b/src/test/incorrect/basicassign1/gcc_O2/basicassign1.expected index 9fcdcc2bc..114827622 100644 --- a/src/test/incorrect/basicassign1/gcc_O2/basicassign1.expected +++ b/src/test/incorrect/basicassign1/gcc_O2/basicassign1.expected @@ -8,7 +8,7 @@ var {:extern} R2: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $z_addr: bv64; axiom ($z_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else false) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -40,12 +36,9 @@ function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures 
(memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -53,8 +46,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -71,18 +64,12 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R2, Gamma_mem, R0, R1, R2, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/incorrect/basicassign1/gcc_O2/basicassign1_gtirb.expected b/src/test/incorrect/basicassign1/gcc_O2/basicassign1_gtirb.expected index 7ef5b5223..391a905c7 100644 --- a/src/test/incorrect/basicassign1/gcc_O2/basicassign1_gtirb.expected +++ b/src/test/incorrect/basicassign1/gcc_O2/basicassign1_gtirb.expected @@ -8,7 +8,7 @@ var {:extern} R2: bv64; var {:extern} mem: [bv64]bv8; const {:extern} $z_addr: bv64; axiom ($z_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else false) } @@ -29,10 +29,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -40,12 +36,9 @@ function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures 
(memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -53,8 +46,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -71,18 +64,12 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R2, Gamma_mem, R0, R1, R2, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/incorrect/basicassign1/gcc_pic/basicassign1.expected b/src/test/incorrect/basicassign1/gcc_pic/basicassign1.expected index 2359fdec8..13d8861f3 100644 --- a/src/test/incorrect/basicassign1/gcc_pic/basicassign1.expected +++ b/src/test/incorrect/basicassign1/gcc_pic/basicassign1.expected @@ -10,7 +10,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $z_addr: bv64; axiom ($z_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else false) } @@ -35,10 +35,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -46,23 +42,20 @@ function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1980bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1981bv64) == 0bv8); - free ensures (memory_load8_le(mem, 
1982bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1983bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1980bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -79,28 +72,22 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R31, Gamma_mem, Gamma_stack, R0, R1, R31, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1980bv64) == 1bv8); - free requires (memory_load8_le(mem, 1981bv64) == 0bv8); - free requires (memory_load8_le(mem, 1982bv64) == 2bv8); - free requires (memory_load8_le(mem, 1983bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1980bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1980bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1981bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1982bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1983bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1980bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/incorrect/basicassign1/gcc_pic/basicassign1_gtirb.expected b/src/test/incorrect/basicassign1/gcc_pic/basicassign1_gtirb.expected index c9d86040b..207860a9c 100644 --- a/src/test/incorrect/basicassign1/gcc_pic/basicassign1_gtirb.expected +++ b/src/test/incorrect/basicassign1/gcc_pic/basicassign1_gtirb.expected @@ 
-10,7 +10,7 @@ var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; const {:extern} $z_addr: bv64; axiom ($z_addr == 69652bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else false) } @@ -35,10 +35,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -46,23 +42,20 @@ function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32 function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1980bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1981bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1982bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1983bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1980bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -79,28 +72,22 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R31, Gamma_mem, Gamma_stack, R0, R1, R31, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1980bv64) == 1bv8); - free requires (memory_load8_le(mem, 1981bv64) == 0bv8); - free requires (memory_load8_le(mem, 1982bv64) == 2bv8); - free requires (memory_load8_le(mem, 1983bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1980bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69652bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69608bv64) 
== 69656bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1980bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1981bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1982bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1983bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1980bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69652bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69656bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/incorrect/basicassign2/clang/basicassign2.expected b/src/test/incorrect/basicassign2/clang/basicassign2.expected index 61c920226..df488c4d1 100644 --- a/src/test/incorrect/basicassign2/clang/basicassign2.expected +++ b/src/test/incorrect/basicassign2/clang/basicassign2.expected @@ -16,7 +16,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69696bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -29,12 +29,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64) returns ([bv64]bv8) { @@ -43,12 +43,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures 
(memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -56,8 +53,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -74,20 +71,14 @@ procedure main(); modifies Gamma_R0, Gamma_R10, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_mem, Gamma_stack, R0, R10, R31, R8, R9, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1892bv64) == 1bv8); - free requires (memory_load8_le(mem, 1893bv64) == 0bv8); - free requires (memory_load8_le(mem, 1894bv64) == 2bv8); - free requires (memory_load8_le(mem, 1895bv64) == 0bv8); + free requires (memory_load32_le(mem, 1892bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign2/clang/basicassign2_gtirb.expected b/src/test/incorrect/basicassign2/clang/basicassign2_gtirb.expected index 0d8b20ef0..95de0c100 100644 --- a/src/test/incorrect/basicassign2/clang/basicassign2_gtirb.expected +++ b/src/test/incorrect/basicassign2/clang/basicassign2_gtirb.expected @@ -16,7 +16,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69696bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -29,12 +29,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} 
memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64) returns ([bv64]bv8) { @@ -43,12 +43,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -56,8 +53,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -74,20 +71,14 @@ procedure main(); modifies Gamma_R0, Gamma_R10, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_mem, Gamma_stack, R0, R10, R31, R8, R9, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1892bv64) == 1bv8); - free requires (memory_load8_le(mem, 1893bv64) == 0bv8); - free requires (memory_load8_le(mem, 1894bv64) == 2bv8); - free requires (memory_load8_le(mem, 1895bv64) == 0bv8); + free requires (memory_load32_le(mem, 1892bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign2/clang_O2/basicassign2.expected b/src/test/incorrect/basicassign2/clang_O2/basicassign2.expected index 9b9dc95d6..55eab7986 100644 --- a/src/test/incorrect/basicassign2/clang_O2/basicassign2.expected +++ b/src/test/incorrect/basicassign2/clang_O2/basicassign2.expected @@ -10,7 +10,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 
69696bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -23,12 +23,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64) returns ([bv64]bv8) { @@ -37,12 +37,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -50,8 +47,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -68,18 +65,12 @@ procedure main(); modifies Gamma_R0, Gamma_R8, Gamma_R9, Gamma_mem, R0, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 
1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign2/clang_O2/basicassign2_gtirb.expected b/src/test/incorrect/basicassign2/clang_O2/basicassign2_gtirb.expected index b79b4ebee..ce04d30cc 100644 --- a/src/test/incorrect/basicassign2/clang_O2/basicassign2_gtirb.expected +++ b/src/test/incorrect/basicassign2/clang_O2/basicassign2_gtirb.expected @@ -10,7 +10,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69696bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -23,12 +23,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64) returns ([bv64]bv8) { @@ -37,12 +37,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -50,8 +47,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -68,18 
+65,12 @@ procedure main(); modifies Gamma_R0, Gamma_R8, Gamma_R9, Gamma_mem, R0, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign2/clang_pic/basicassign2.expected b/src/test/incorrect/basicassign2/clang_pic/basicassign2.expected index ab4b6e7ec..e1615c0c2 100644 --- a/src/test/incorrect/basicassign2/clang_pic/basicassign2.expected +++ b/src/test/incorrect/basicassign2/clang_pic/basicassign2.expected @@ -16,7 +16,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69696bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -29,12 +29,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64) returns ([bv64]bv8) { @@ -43,23 +43,20 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; 
- ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69696bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -76,28 +73,22 @@ procedure main(); modifies Gamma_R0, Gamma_R10, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_mem, Gamma_stack, R0, R10, R31, R8, R9, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1964bv64) == 1bv8); - free requires (memory_load8_le(mem, 1965bv64) == 0bv8); - free requires (memory_load8_le(mem, 1966bv64) == 2bv8); - free requires (memory_load8_le(mem, 1967bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); + free requires (memory_load32_le(mem, 1964bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69696bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69696bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/incorrect/basicassign2/clang_pic/basicassign2_gtirb.expected b/src/test/incorrect/basicassign2/clang_pic/basicassign2_gtirb.expected index 6fa316b8c..c17318876 100644 --- a/src/test/incorrect/basicassign2/clang_pic/basicassign2_gtirb.expected +++ 
b/src/test/incorrect/basicassign2/clang_pic/basicassign2_gtirb.expected @@ -16,7 +16,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69688bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69696bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -29,12 +29,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64) returns ([bv64]bv8) { @@ -43,23 +43,20 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69696bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -76,28 +73,22 @@ procedure main(); modifies Gamma_R0, Gamma_R10, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_mem, Gamma_stack, R0, R10, R31, R8, R9, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 
1964bv64) == 1bv8); - free requires (memory_load8_le(mem, 1965bv64) == 0bv8); - free requires (memory_load8_le(mem, 1966bv64) == 2bv8); - free requires (memory_load8_le(mem, 1967bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); + free requires (memory_load32_le(mem, 1964bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69696bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69688bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69696bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69688bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/incorrect/basicassign2/gcc/basicassign2.expected b/src/test/incorrect/basicassign2/gcc/basicassign2.expected index 23656ef15..88af71796 100644 --- a/src/test/incorrect/basicassign2/gcc/basicassign2.expected +++ b/src/test/incorrect/basicassign2/gcc/basicassign2.expected @@ -12,7 +12,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69664bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -25,12 +25,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns 
(bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64) returns ([bv64]bv8) { @@ -39,12 +39,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -52,8 +49,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -70,20 +67,14 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R31, Gamma_mem, Gamma_stack, R0, R1, R31, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1916bv64) == 1bv8); - free requires (memory_load8_le(mem, 1917bv64) == 0bv8); - free requires (memory_load8_le(mem, 1918bv64) == 2bv8); - free requires (memory_load8_le(mem, 1919bv64) == 0bv8); + free requires (memory_load32_le(mem, 1916bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign2/gcc/basicassign2_gtirb.expected b/src/test/incorrect/basicassign2/gcc/basicassign2_gtirb.expected index 1b74dea32..9fe37828d 100644 --- a/src/test/incorrect/basicassign2/gcc/basicassign2_gtirb.expected +++ b/src/test/incorrect/basicassign2/gcc/basicassign2_gtirb.expected @@ -12,7 +12,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69664bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -25,12 +25,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := 
value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64) returns ([bv64]bv8) { @@ -39,12 +39,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -52,8 +49,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -70,20 +67,14 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R31, Gamma_mem, Gamma_stack, R0, R1, R31, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1916bv64) == 1bv8); - free requires (memory_load8_le(mem, 1917bv64) == 0bv8); - free requires (memory_load8_le(mem, 1918bv64) == 2bv8); - free requires (memory_load8_le(mem, 1919bv64) == 0bv8); + free requires (memory_load32_le(mem, 1916bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 
69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign2/gcc_O2/basicassign2.expected b/src/test/incorrect/basicassign2/gcc_O2/basicassign2.expected index cbb173f38..af6252b59 100644 --- a/src/test/incorrect/basicassign2/gcc_O2/basicassign2.expected +++ b/src/test/incorrect/basicassign2/gcc_O2/basicassign2.expected @@ -10,7 +10,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69664bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -23,12 +23,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64) returns ([bv64]bv8) { @@ -37,12 +37,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -50,8 +47,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -68,18 +65,12 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R2, Gamma_mem, R0, R1, R2, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 
0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/incorrect/basicassign2/gcc_O2/basicassign2_gtirb.expected b/src/test/incorrect/basicassign2/gcc_O2/basicassign2_gtirb.expected index 8e67f332d..652073954 100644 --- a/src/test/incorrect/basicassign2/gcc_O2/basicassign2_gtirb.expected +++ b/src/test/incorrect/basicassign2/gcc_O2/basicassign2_gtirb.expected @@ -10,7 +10,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69664bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -23,12 +23,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64) returns ([bv64]bv8) { @@ -37,12 +37,9 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + ensures (mem == 
old(mem)); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -50,8 +47,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -68,18 +65,12 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R2, Gamma_mem, R0, R1, R2, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/incorrect/basicassign2/gcc_pic/basicassign2.expected b/src/test/incorrect/basicassign2/gcc_pic/basicassign2.expected index f75604861..20875ec1c 100644 --- a/src/test/incorrect/basicassign2/gcc_pic/basicassign2.expected +++ b/src/test/incorrect/basicassign2/gcc_pic/basicassign2.expected @@ -12,7 +12,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69664bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -25,12 +25,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + 
(memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64) returns ([bv64]bv8) { @@ -39,23 +39,20 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1980bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1981bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1982bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1983bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69664bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1980bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69664bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -72,28 +69,22 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R31, Gamma_mem, Gamma_stack, R0, R1, R31, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1980bv64) == 1bv8); - free requires (memory_load8_le(mem, 1981bv64) == 0bv8); - free requires (memory_load8_le(mem, 1982bv64) == 2bv8); - free requires (memory_load8_le(mem, 1983bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69664bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1980bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69664bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1980bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1981bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1982bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1983bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69664bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1980bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); 
free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69664bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/incorrect/basicassign2/gcc_pic/basicassign2_gtirb.expected b/src/test/incorrect/basicassign2/gcc_pic/basicassign2_gtirb.expected index 5b858de70..cb8945060 100644 --- a/src/test/incorrect/basicassign2/gcc_pic/basicassign2_gtirb.expected +++ b/src/test/incorrect/basicassign2/gcc_pic/basicassign2_gtirb.expected @@ -12,7 +12,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69664bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69656bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $z_addr) then true else (if (index == $secret_addr) then false else false)) } @@ -25,12 +25,12 @@ function {:extern} gamma_store64(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value][bvadd64(index, 1bv64) := value][bvadd64(index, 2bv64) := value][bvadd64(index, 3bv64) := value][bvadd64(index, 4bv64) := value][bvadd64(index, 5bv64) := value][bvadd64(index, 6bv64) := value][bvadd64(index, 7bv64) := value] } -function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { - (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] +function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { + (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64) returns ([bv64]bv8) { @@ -39,23 +39,20 @@ function {:extern} memory_store64_le(memory: [bv64]bv8, index: bv64, value: bv64 procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1980bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1981bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1982bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1983bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69664bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1980bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 
1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69664bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -72,28 +69,22 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R31, Gamma_mem, Gamma_stack, R0, R1, R31, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1980bv64) == 1bv8); - free requires (memory_load8_le(mem, 1981bv64) == 0bv8); - free requires (memory_load8_le(mem, 1982bv64) == 2bv8); - free requires (memory_load8_le(mem, 1983bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69664bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1980bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69664bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1980bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1981bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1982bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1983bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69664bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1980bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69664bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/incorrect/basicassign3/clang/basicassign3.expected b/src/test/incorrect/basicassign3/clang/basicassign3.expected index 573fcf38f..1ee2cd40f 100644 --- a/src/test/incorrect/basicassign3/clang/basicassign3.expected +++ b/src/test/incorrect/basicassign3/clang/basicassign3.expected @@ -16,7 +16,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69681bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69682bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $secret_addr) then false else (if (index == $z_addr) then true else false)) } @@ -29,6 +29,10 @@ function {:extern} gamma_store8(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value] } +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ 
(memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) +} + function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } @@ -44,12 +48,9 @@ function {:extern} memory_store8_le(memory: [bv64]bv8, index: bv64, value: bv8) function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -57,8 +58,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -75,20 +76,14 @@ procedure main(); modifies Gamma_R0, Gamma_R10, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_mem, Gamma_stack, R0, R10, R31, R8, R9, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1892bv64) == 1bv8); - free requires (memory_load8_le(mem, 1893bv64) == 0bv8); - free requires (memory_load8_le(mem, 1894bv64) == 2bv8); - free requires (memory_load8_le(mem, 1895bv64) == 0bv8); + free requires (memory_load32_le(mem, 1892bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign3/clang/basicassign3_gtirb.expected b/src/test/incorrect/basicassign3/clang/basicassign3_gtirb.expected index 9ded6dd3f..dda4fe687 100644 --- a/src/test/incorrect/basicassign3/clang/basicassign3_gtirb.expected +++ b/src/test/incorrect/basicassign3/clang/basicassign3_gtirb.expected @@ -16,7 +16,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69681bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69682bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $secret_addr) then false else (if (index == 
$z_addr) then true else false)) } @@ -29,6 +29,10 @@ function {:extern} gamma_store8(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value] } +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) +} + function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } @@ -45,12 +49,9 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -58,8 +59,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -76,20 +77,14 @@ procedure main(); modifies Gamma_R0, Gamma_R10, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_mem, Gamma_stack, R0, R10, R31, R8, R9, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1892bv64) == 1bv8); - free requires (memory_load8_le(mem, 1893bv64) == 0bv8); - free requires (memory_load8_le(mem, 1894bv64) == 2bv8); - free requires (memory_load8_le(mem, 1895bv64) == 0bv8); + free requires (memory_load32_le(mem, 1892bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1892bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1893bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1894bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1895bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1892bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign3/clang_O2/basicassign3.expected b/src/test/incorrect/basicassign3/clang_O2/basicassign3.expected index d8dec4426..900ca1cca 100644 --- a/src/test/incorrect/basicassign3/clang_O2/basicassign3.expected +++ b/src/test/incorrect/basicassign3/clang_O2/basicassign3.expected @@ -10,7 +10,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 
69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $secret_addr) then false else (if (index == $z_addr) then true else false)) } @@ -23,6 +23,10 @@ function {:extern} gamma_store8(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value] } +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) +} + function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } @@ -38,12 +42,9 @@ function {:extern} memory_store8_le(memory: [bv64]bv8, index: bv64, value: bv8) function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -51,8 +52,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -69,18 +70,12 @@ procedure main(); modifies Gamma_R0, Gamma_R8, Gamma_R9, Gamma_mem, R0, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign3/clang_O2/basicassign3_gtirb.expected b/src/test/incorrect/basicassign3/clang_O2/basicassign3_gtirb.expected index a00e3afa0..715f96918 100644 --- 
a/src/test/incorrect/basicassign3/clang_O2/basicassign3_gtirb.expected +++ b/src/test/incorrect/basicassign3/clang_O2/basicassign3_gtirb.expected @@ -10,7 +10,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69684bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69688bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $secret_addr) then false else (if (index == $z_addr) then true else false)) } @@ -23,6 +23,10 @@ function {:extern} gamma_store8(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value] } +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) +} + function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } @@ -39,12 +43,9 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -52,8 +53,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -70,18 +71,12 @@ procedure main(); modifies Gamma_R0, Gamma_R8, Gamma_R9, Gamma_mem, R0, R8, R9, mem; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1856bv64) == 1bv8); - free requires (memory_load8_le(mem, 1857bv64) == 0bv8); - free requires (memory_load8_le(mem, 1858bv64) == 2bv8); - free requires (memory_load8_le(mem, 1859bv64) == 0bv8); + free requires (memory_load32_le(mem, 1856bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load8_le(mem, 1856bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1857bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1858bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1859bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1856bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git 
a/src/test/incorrect/basicassign3/clang_pic/basicassign3.expected b/src/test/incorrect/basicassign3/clang_pic/basicassign3.expected index d85f4196c..60ce354a8 100644 --- a/src/test/incorrect/basicassign3/clang_pic/basicassign3.expected +++ b/src/test/incorrect/basicassign3/clang_pic/basicassign3.expected @@ -16,7 +16,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69681bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69682bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $secret_addr) then false else (if (index == $z_addr) then true else false)) } @@ -33,6 +33,10 @@ function {:extern} gamma_store8(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value] } +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) +} + function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } @@ -48,23 +52,20 @@ function {:extern} memory_store8_le(memory: [bv64]bv8, index: bv64, value: bv8) function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69681bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69682bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69681bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -81,28 +82,22 @@ procedure main(); modifies Gamma_R0, Gamma_R10, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_mem, Gamma_stack, R0, R10, R31, R8, R9, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1964bv64) == 1bv8); - free requires (memory_load8_le(mem, 1965bv64) == 0bv8); - free requires (memory_load8_le(mem, 1966bv64) == 2bv8); - free requires (memory_load8_le(mem, 1967bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69681bv64); + free requires (memory_load32_le(mem, 1964bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free 
requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69682bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69681bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69681bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69682bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69681bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/incorrect/basicassign3/clang_pic/basicassign3_gtirb.expected b/src/test/incorrect/basicassign3/clang_pic/basicassign3_gtirb.expected index 3af45974b..c0e66711e 100644 --- a/src/test/incorrect/basicassign3/clang_pic/basicassign3_gtirb.expected +++ b/src/test/incorrect/basicassign3/clang_pic/basicassign3_gtirb.expected @@ -16,7 +16,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69681bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69682bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $secret_addr) then false else (if (index == $z_addr) then true else false)) } @@ -33,6 +33,10 @@ function {:extern} gamma_store8(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value] } +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) +} + function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } @@ -49,23 +53,20 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69681bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 
1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69682bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69681bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -82,28 +83,22 @@ procedure main(); modifies Gamma_R0, Gamma_R10, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_mem, Gamma_stack, R0, R10, R31, R8, R9, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1964bv64) == 1bv8); - free requires (memory_load8_le(mem, 1965bv64) == 0bv8); - free requires (memory_load8_le(mem, 1966bv64) == 2bv8); - free requires (memory_load8_le(mem, 1967bv64) == 0bv8); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load64_le(mem, 69584bv64) == 69681bv64); + free requires (memory_load32_le(mem, 1964bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69682bv64); + free requires (memory_load64_le(mem, 69584bv64) == 69681bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); - free ensures (memory_load64_le(mem, 69584bv64) == 69681bv64); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69682bv64); + free ensures (memory_load64_le(mem, 69584bv64) == 69681bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/incorrect/basicassign3/gcc/basicassign3.expected b/src/test/incorrect/basicassign3/gcc/basicassign3.expected index 45ac95591..8058b3573 100644 --- a/src/test/incorrect/basicassign3/gcc/basicassign3.expected +++ b/src/test/incorrect/basicassign3/gcc/basicassign3.expected @@ -12,7 +12,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69650bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69649bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $secret_addr) then false else (if (index == $z_addr) then true else false)) } @@ -25,6 +25,10 @@ function {:extern} gamma_store8(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value] } +function {:extern} memory_load32_le(memory: 
[bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) +} + function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } @@ -40,12 +44,9 @@ function {:extern} memory_store8_le(memory: [bv64]bv8, index: bv64, value: bv8) function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -53,8 +54,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -71,20 +72,14 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R31, Gamma_mem, Gamma_stack, R0, R1, R31, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1916bv64) == 1bv8); - free requires (memory_load8_le(mem, 1917bv64) == 0bv8); - free requires (memory_load8_le(mem, 1918bv64) == 2bv8); - free requires (memory_load8_le(mem, 1919bv64) == 0bv8); + free requires (memory_load32_le(mem, 1916bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign3/gcc/basicassign3_gtirb.expected b/src/test/incorrect/basicassign3/gcc/basicassign3_gtirb.expected index 6518433ba..572d5b366 100644 --- a/src/test/incorrect/basicassign3/gcc/basicassign3_gtirb.expected +++ b/src/test/incorrect/basicassign3/gcc/basicassign3_gtirb.expected @@ -12,7 +12,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69650bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69649bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == 
$secret_addr) then false else (if (index == $z_addr) then true else false)) } @@ -25,6 +25,10 @@ function {:extern} gamma_store8(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value] } +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) +} + function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } @@ -41,12 +45,9 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -54,8 +55,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -72,20 +73,14 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R31, Gamma_mem, Gamma_stack, R0, R1, R31, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1916bv64) == 1bv8); - free requires (memory_load8_le(mem, 1917bv64) == 0bv8); - free requires (memory_load8_le(mem, 1918bv64) == 2bv8); - free requires (memory_load8_le(mem, 1919bv64) == 0bv8); + free requires (memory_load32_le(mem, 1916bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1916bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1917bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1918bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1919bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1916bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/incorrect/basicassign3/gcc_O2/basicassign3.expected b/src/test/incorrect/basicassign3/gcc_O2/basicassign3.expected index 2e74e5ca1..d14aea98f 100644 --- a/src/test/incorrect/basicassign3/gcc_O2/basicassign3.expected +++ b/src/test/incorrect/basicassign3/gcc_O2/basicassign3.expected @@ -10,7 +10,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 
69650bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69649bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $secret_addr) then false else (if (index == $z_addr) then true else false)) } @@ -23,6 +23,10 @@ function {:extern} gamma_store8(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value] } +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) +} + function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } @@ -38,12 +42,9 @@ function {:extern} memory_store8_le(memory: [bv64]bv8, index: bv64, value: bv8) function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -51,8 +52,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -69,18 +70,12 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R2, Gamma_mem, R0, R1, R2, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git a/src/test/incorrect/basicassign3/gcc_O2/basicassign3_gtirb.expected b/src/test/incorrect/basicassign3/gcc_O2/basicassign3_gtirb.expected index e83d081a2..3f85361a9 100644 --- 
a/src/test/incorrect/basicassign3/gcc_O2/basicassign3_gtirb.expected +++ b/src/test/incorrect/basicassign3/gcc_O2/basicassign3_gtirb.expected @@ -10,7 +10,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69650bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69649bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $secret_addr) then false else (if (index == $z_addr) then true else false)) } @@ -23,6 +23,10 @@ function {:extern} gamma_store8(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value] } +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) +} + function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } @@ -39,12 +43,9 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); @@ -52,8 +53,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -70,18 +71,12 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R2, Gamma_mem, R0, R1, R2, mem; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1872bv64); free requires (memory_load64_le(mem, 69024bv64) == 1792bv64); free requires (memory_load64_le(mem, 69616bv64) == 1536bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1536bv64); diff --git 
a/src/test/incorrect/basicassign3/gcc_pic/basicassign3.expected b/src/test/incorrect/basicassign3/gcc_pic/basicassign3.expected index c53177c2b..be209327b 100644 --- a/src/test/incorrect/basicassign3/gcc_pic/basicassign3.expected +++ b/src/test/incorrect/basicassign3/gcc_pic/basicassign3.expected @@ -12,7 +12,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69650bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69649bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $secret_addr) then false else (if (index == $z_addr) then true else false)) } @@ -29,6 +29,10 @@ function {:extern} gamma_store8(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value] } +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) +} + function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } @@ -44,23 +48,20 @@ function {:extern} memory_store8_le(memory: [bv64]bv8, index: bv64, value: bv8) function {:extern} {:bvbuiltin "zero_extend 56"} zero_extend56_8(bv8) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1980bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1981bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1982bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1983bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69650bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1980bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69649bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69650bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -77,28 +78,22 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R31, Gamma_mem, Gamma_stack, R0, R1, R31, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1980bv64) == 1bv8); - free requires (memory_load8_le(mem, 1981bv64) == 0bv8); - free requires (memory_load8_le(mem, 1982bv64) == 2bv8); - free requires (memory_load8_le(mem, 1983bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69650bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1980bv64) 
== 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69649bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69650bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1980bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1981bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1982bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1983bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69650bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1980bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69649bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69650bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/incorrect/basicassign3/gcc_pic/basicassign3_gtirb.expected b/src/test/incorrect/basicassign3/gcc_pic/basicassign3_gtirb.expected index ccf993008..f4858673c 100644 --- a/src/test/incorrect/basicassign3/gcc_pic/basicassign3_gtirb.expected +++ b/src/test/incorrect/basicassign3/gcc_pic/basicassign3_gtirb.expected @@ -12,7 +12,7 @@ const {:extern} $secret_addr: bv64; axiom ($secret_addr == 69650bv64); const {:extern} $z_addr: bv64; axiom ($z_addr == 69649bv64); -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { (if (index == $secret_addr) then false else (if (index == $z_addr) then true else false)) } @@ -29,6 +29,10 @@ function {:extern} gamma_store8(gammaMap: [bv64]bool, index: bv64, value: bool) gammaMap[index := value] } +function {:extern} memory_load32_le(memory: [bv64]bv8, index: bv64) returns (bv32) { + (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))) +} + function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv64) { (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } @@ -45,23 +49,20 @@ function {:extern} {:bvbuiltin "zero_extend 24"} zero_extend24_8(bv8) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1980bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1981bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1982bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1983bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69650bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - 
free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1980bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69649bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69650bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -78,28 +79,22 @@ procedure main(); modifies Gamma_R0, Gamma_R1, Gamma_R31, Gamma_mem, Gamma_stack, R0, R1, R31, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1980bv64) == 1bv8); - free requires (memory_load8_le(mem, 1981bv64) == 0bv8); - free requires (memory_load8_le(mem, 1982bv64) == 2bv8); - free requires (memory_load8_le(mem, 1983bv64) == 0bv8); - free requires (memory_load64_le(mem, 69608bv64) == 69650bv64); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load32_le(mem, 1980bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69649bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69608bv64) == 69650bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1980bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1981bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1982bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1983bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69608bv64) == 69650bv64); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load32_le(mem, 1980bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69649bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69608bv64) == 69650bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/incorrect/iflocal/clang/iflocal.expected b/src/test/incorrect/iflocal/clang/iflocal.expected index 0bee67490..b4d695d42 100644 --- a/src/test/incorrect/iflocal/clang/iflocal.expected +++ b/src/test/incorrect/iflocal/clang/iflocal.expected @@ -39,10 +39,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ 
(memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -52,12 +48,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -65,8 +58,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -83,20 +76,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_VF, Gamma_ZF, Gamma_stack, NF, R0, R31, R8, VF, ZF, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/incorrect/iflocal/clang/iflocal_gtirb.expected b/src/test/incorrect/iflocal/clang/iflocal_gtirb.expected index 27f877960..7296e2682 100644 --- a/src/test/incorrect/iflocal/clang/iflocal_gtirb.expected +++ b/src/test/incorrect/iflocal/clang/iflocal_gtirb.expected @@ -38,10 +38,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function 
{:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -50,12 +46,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -63,8 +56,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -81,20 +74,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_VF, Gamma_ZF, Gamma_stack, NF, R0, R31, R8, VF, ZF, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1896bv64) == 1bv8); - free requires (memory_load8_le(mem, 1897bv64) == 0bv8); - free requires (memory_load8_le(mem, 1898bv64) == 2bv8); - free requires (memory_load8_le(mem, 1899bv64) == 0bv8); + free requires (memory_load32_le(mem, 1896bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1896bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1897bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1898bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1899bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1896bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/incorrect/iflocal/gcc/iflocal.expected b/src/test/incorrect/iflocal/gcc/iflocal.expected index 56c7ab8a8..f89816a93 100644 --- a/src/test/incorrect/iflocal/gcc/iflocal.expected +++ b/src/test/incorrect/iflocal/gcc/iflocal.expected @@ -37,10 +37,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} 
memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -50,12 +46,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1880bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1881bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1882bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1883bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1880bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -63,8 +56,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -81,20 +74,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_stack, NF, R0, R31, VF, ZF, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1880bv64) == 1bv8); - free requires (memory_load8_le(mem, 1881bv64) == 0bv8); - free requires (memory_load8_le(mem, 1882bv64) == 2bv8); - free requires (memory_load8_le(mem, 1883bv64) == 0bv8); + free requires (memory_load32_le(mem, 1880bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1880bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1881bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1882bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1883bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1880bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/incorrect/iflocal/gcc/iflocal_gtirb.expected b/src/test/incorrect/iflocal/gcc/iflocal_gtirb.expected index ff10eb358..f9bfdc01e 100644 --- a/src/test/incorrect/iflocal/gcc/iflocal_gtirb.expected +++ b/src/test/incorrect/iflocal/gcc/iflocal_gtirb.expected @@ -36,10 +36,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := 
value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -48,12 +44,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1880bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1881bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1882bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1883bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1880bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -61,8 +54,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -79,20 +72,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_stack, NF, R0, R31, VF, ZF, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1880bv64) == 1bv8); - free requires (memory_load8_le(mem, 1881bv64) == 0bv8); - free requires (memory_load8_le(mem, 1882bv64) == 2bv8); - free requires (memory_load8_le(mem, 1883bv64) == 0bv8); + free requires (memory_load32_le(mem, 1880bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1880bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1881bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1882bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1883bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1880bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/incorrect/nestedifglobal/clang/nestedifglobal.expected b/src/test/incorrect/nestedifglobal/clang/nestedifglobal.expected index 53de1f20f..f21ee0c3b 100644 --- a/src/test/incorrect/nestedifglobal/clang/nestedifglobal.expected +++ b/src/test/incorrect/nestedifglobal/clang/nestedifglobal.expected @@ -18,7 +18,7 @@ var {:extern} VF: bv1; var {:extern} ZF: bv1; var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -45,10 +45,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} 
memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -58,12 +54,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1976bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1977bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1978bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1979bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1976bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -71,8 +64,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -89,20 +82,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, R9, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1976bv64) == 1bv8); - free requires (memory_load8_le(mem, 1977bv64) == 0bv8); - free requires (memory_load8_le(mem, 1978bv64) == 2bv8); - free requires (memory_load8_le(mem, 1979bv64) == 0bv8); + free requires (memory_load32_le(mem, 1976bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1976bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1977bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1978bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1979bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1976bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/incorrect/nestedifglobal/clang/nestedifglobal_gtirb.expected b/src/test/incorrect/nestedifglobal/clang/nestedifglobal_gtirb.expected index d813bfa91..6378df02e 100644 --- a/src/test/incorrect/nestedifglobal/clang/nestedifglobal_gtirb.expected +++ b/src/test/incorrect/nestedifglobal/clang/nestedifglobal_gtirb.expected @@ -18,7 +18,7 @@ var {:extern} VF: bv1; var {:extern} ZF: bv1; var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -44,10 +44,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 
(memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -57,12 +53,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1980bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1981bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1982bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1983bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1980bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); @@ -70,8 +63,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -88,20 +81,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, R9, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 1980bv64) == 1bv8); - free requires (memory_load8_le(mem, 1981bv64) == 0bv8); - free requires (memory_load8_le(mem, 1982bv64) == 2bv8); - free requires (memory_load8_le(mem, 1983bv64) == 0bv8); + free requires (memory_load32_le(mem, 1980bv64) == 131073bv32); free requires (memory_load64_le(mem, 69064bv64) == 1808bv64); free requires (memory_load64_le(mem, 69072bv64) == 1728bv64); free requires (memory_load64_le(mem, 69592bv64) == 1812bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1980bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1981bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1982bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1983bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1980bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69064bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69072bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1812bv64); diff --git a/src/test/incorrect/nestedifglobal/clang_pic/nestedifglobal.expected b/src/test/incorrect/nestedifglobal/clang_pic/nestedifglobal.expected index 8f50a130a..80d21764f 100644 --- a/src/test/incorrect/nestedifglobal/clang_pic/nestedifglobal.expected +++ b/src/test/incorrect/nestedifglobal/clang_pic/nestedifglobal.expected @@ -18,7 +18,7 @@ var {:extern} VF: bv1; var {:extern} ZF: bv1; var {:extern} mem: [bv64]bv8; var {:extern} stack: 
[bv64]bv8; -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -53,10 +53,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -70,23 +66,20 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2052bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2053bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2054bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2055bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2052bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -103,28 +96,22 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, R9, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 2052bv64) == 1bv8); - free requires (memory_load8_le(mem, 2053bv64) == 0bv8); - free requires (memory_load8_le(mem, 2054bv64) == 2bv8); - free requires (memory_load8_le(mem, 2055bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 2052bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69688bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free 
ensures (memory_load8_le(mem, 2052bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2053bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2054bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2055bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 2052bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/incorrect/nestedifglobal/clang_pic/nestedifglobal_gtirb.expected b/src/test/incorrect/nestedifglobal/clang_pic/nestedifglobal_gtirb.expected index 8bce8a767..88dd27ef3 100644 --- a/src/test/incorrect/nestedifglobal/clang_pic/nestedifglobal_gtirb.expected +++ b/src/test/incorrect/nestedifglobal/clang_pic/nestedifglobal_gtirb.expected @@ -18,7 +18,7 @@ var {:extern} VF: bv1; var {:extern} ZF: bv1; var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -52,10 +52,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -69,23 +65,20 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2060bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2061bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2062bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2063bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2060bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + 
ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -102,28 +95,22 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R31, Gamma_R8, Gamma_R9, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R31, R8, R9, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69664bv64) == 0bv64); free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); - free requires (memory_load8_le(mem, 2060bv64) == 1bv8); - free requires (memory_load8_le(mem, 2061bv64) == 0bv8); - free requires (memory_load8_le(mem, 2062bv64) == 2bv8); - free requires (memory_load8_le(mem, 2063bv64) == 0bv8); - free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); - free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); + free requires (memory_load32_le(mem, 2060bv64) == 131073bv32); free requires (memory_load64_le(mem, 69048bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); free requires (memory_load64_le(mem, 69568bv64) == 69688bv64); + free requires (memory_load64_le(mem, 69576bv64) == 69684bv64); free requires (memory_load64_le(mem, 69592bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69056bv64) == 1792bv64); + free requires (memory_load64_le(mem, 69672bv64) == 69672bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 2060bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2061bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2062bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2063bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); - free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); + free ensures (memory_load32_le(mem, 2060bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69048bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69568bv64) == 69688bv64); + free ensures (memory_load64_le(mem, 69576bv64) == 69684bv64); free ensures (memory_load64_le(mem, 69592bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69056bv64) == 1792bv64); + free ensures (memory_load64_le(mem, 69672bv64) == 69672bv64); implementation main() { diff --git a/src/test/incorrect/nestedifglobal/gcc/nestedifglobal.expected b/src/test/incorrect/nestedifglobal/gcc/nestedifglobal.expected index e2f024896..ef127f1d9 100644 --- a/src/test/incorrect/nestedifglobal/gcc/nestedifglobal.expected +++ b/src/test/incorrect/nestedifglobal/gcc/nestedifglobal.expected @@ -16,7 +16,7 @@ var {:extern} VF: bv1; var {:extern} ZF: bv1; var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -43,10 +43,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -56,12 +52,9 @@ function 
{:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1956bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1957bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1958bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1959bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1956bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -69,8 +62,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -87,20 +80,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R1, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R1, R31, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1956bv64) == 1bv8); - free requires (memory_load8_le(mem, 1957bv64) == 0bv8); - free requires (memory_load8_le(mem, 1958bv64) == 2bv8); - free requires (memory_load8_le(mem, 1959bv64) == 0bv8); + free requires (memory_load32_le(mem, 1956bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1956bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1957bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1958bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1959bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1956bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/incorrect/nestedifglobal/gcc/nestedifglobal_gtirb.expected b/src/test/incorrect/nestedifglobal/gcc/nestedifglobal_gtirb.expected index df7992d44..51b6b796d 100644 --- a/src/test/incorrect/nestedifglobal/gcc/nestedifglobal_gtirb.expected +++ b/src/test/incorrect/nestedifglobal/gcc/nestedifglobal_gtirb.expected @@ -16,7 +16,7 @@ var {:extern} VF: bv1; var {:extern} ZF: bv1; var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -42,10 +42,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} 
memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -55,12 +51,9 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); @@ -68,8 +61,8 @@ procedure {:extern} rely(); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -86,20 +79,14 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R1, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R1, R31, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 1964bv64) == 1bv8); - free requires (memory_load8_le(mem, 1965bv64) == 0bv8); - free requires (memory_load8_le(mem, 1966bv64) == 2bv8); - free requires (memory_load8_le(mem, 1967bv64) == 0bv8); + free requires (memory_load32_le(mem, 1964bv64) == 131073bv32); free requires (memory_load64_le(mem, 69016bv64) == 1808bv64); free requires (memory_load64_le(mem, 69024bv64) == 1728bv64); free requires (memory_load64_le(mem, 69616bv64) == 1812bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 1964bv64) == 1bv8); - free ensures (memory_load8_le(mem, 1965bv64) == 0bv8); - free ensures (memory_load8_le(mem, 1966bv64) == 2bv8); - free ensures (memory_load8_le(mem, 1967bv64) == 0bv8); + free ensures (memory_load32_le(mem, 1964bv64) == 131073bv32); free ensures (memory_load64_le(mem, 69016bv64) == 1808bv64); free ensures (memory_load64_le(mem, 69024bv64) == 1728bv64); free ensures (memory_load64_le(mem, 69616bv64) == 1812bv64); diff --git a/src/test/incorrect/nestedifglobal/gcc_pic/nestedifglobal.expected b/src/test/incorrect/nestedifglobal/gcc_pic/nestedifglobal.expected index 03cb61a25..eba786ea8 100644 --- a/src/test/incorrect/nestedifglobal/gcc_pic/nestedifglobal.expected +++ b/src/test/incorrect/nestedifglobal/gcc_pic/nestedifglobal.expected @@ -16,7 +16,7 @@ var {:extern} VF: bv1; var {:extern} ZF: bv1; var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -47,10 +47,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ 
(memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -60,23 +56,20 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2020bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2021bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2022bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2023bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2020bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -93,28 +86,22 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, Gamma_R0, Gamma_R1, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R1, R31, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 2020bv64) == 1bv8); - free requires (memory_load8_le(mem, 2021bv64) == 0bv8); - free requires (memory_load8_le(mem, 2022bv64) == 2bv8); - free requires (memory_load8_le(mem, 2023bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load32_le(mem, 2020bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 2020bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2021bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2022bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2023bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 
69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 2020bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { diff --git a/src/test/incorrect/nestedifglobal/gcc_pic/nestedifglobal_gtirb.expected b/src/test/incorrect/nestedifglobal/gcc_pic/nestedifglobal_gtirb.expected index 1c44677d8..8e445464f 100644 --- a/src/test/incorrect/nestedifglobal/gcc_pic/nestedifglobal_gtirb.expected +++ b/src/test/incorrect/nestedifglobal/gcc_pic/nestedifglobal_gtirb.expected @@ -16,7 +16,7 @@ var {:extern} VF: bv1; var {:extern} ZF: bv1; var {:extern} mem: [bv64]bv8; var {:extern} stack: [bv64]bv8; -function {:extern} L(memory: [bv64]bv8, index: bv64) returns (bool) { +function {:extern} L(mem$in: [bv64]bv8, index: bv64) returns (bool) { false } @@ -46,10 +46,6 @@ function {:extern} memory_load64_le(memory: [bv64]bv8, index: bv64) returns (bv6 (memory[bvadd64(index, 7bv64)] ++ (memory[bvadd64(index, 6bv64)] ++ (memory[bvadd64(index, 5bv64)] ++ (memory[bvadd64(index, 4bv64)] ++ (memory[bvadd64(index, 3bv64)] ++ (memory[bvadd64(index, 2bv64)] ++ (memory[bvadd64(index, 1bv64)] ++ memory[index]))))))) } -function {:extern} memory_load8_le(memory: [bv64]bv8, index: bv64) returns (bv8) { - memory[index] -} - function {:extern} memory_store32_le(memory: [bv64]bv8, index: bv64, value: bv32) returns ([bv64]bv8) { memory[index := value[8:0]][bvadd64(index, 1bv64) := value[16:8]][bvadd64(index, 2bv64) := value[24:16]][bvadd64(index, 3bv64) := value[32:24]] } @@ -59,23 +55,20 @@ function {:extern} {:bvbuiltin "zero_extend 1"} zero_extend1_32(bv32) returns (b function {:extern} {:bvbuiltin "zero_extend 32"} zero_extend32_32(bv32) returns (bv64); procedure {:extern} rely(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); - free ensures (memory_load8_le(mem, 2028bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2029bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2030bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2031bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + ensures (mem == old(mem)); + free ensures (memory_load32_le(mem, 2028bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); procedure {:extern} rely_transitive(); modifies Gamma_mem, mem; - ensures (mem == old(mem)); ensures (Gamma_mem == old(Gamma_mem)); + ensures (mem == old(mem)); implementation {:extern} rely_transitive() { @@ -92,28 +85,22 @@ procedure main(); modifies CF, Gamma_CF, Gamma_NF, 
Gamma_R0, Gamma_R1, Gamma_R31, Gamma_VF, Gamma_ZF, Gamma_mem, Gamma_stack, NF, R0, R1, R31, VF, ZF, mem, stack; free requires (memory_load64_le(mem, 69632bv64) == 0bv64); free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load8_le(mem, 2028bv64) == 1bv8); - free requires (memory_load8_le(mem, 2029bv64) == 0bv8); - free requires (memory_load8_le(mem, 2030bv64) == 2bv8); - free requires (memory_load8_le(mem, 2031bv64) == 0bv8); - free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); - free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); - free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load32_le(mem, 2028bv64) == 131073bv32); + free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); free requires (memory_load64_le(mem, 69008bv64) == 1792bv64); free requires (memory_load64_le(mem, 69592bv64) == 69656bv64); - free requires (memory_load64_le(mem, 69000bv64) == 1872bv64); + free requires (memory_load64_le(mem, 69600bv64) == 69652bv64); + free requires (memory_load64_le(mem, 69616bv64) == 1876bv64); + free requires (memory_load64_le(mem, 69640bv64) == 69640bv64); free ensures (Gamma_R31 == old(Gamma_R31)); free ensures (R31 == old(R31)); - free ensures (memory_load8_le(mem, 2028bv64) == 1bv8); - free ensures (memory_load8_le(mem, 2029bv64) == 0bv8); - free ensures (memory_load8_le(mem, 2030bv64) == 2bv8); - free ensures (memory_load8_le(mem, 2031bv64) == 0bv8); - free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); - free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); - free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load32_le(mem, 2028bv64) == 131073bv32); + free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); free ensures (memory_load64_le(mem, 69008bv64) == 1792bv64); free ensures (memory_load64_le(mem, 69592bv64) == 69656bv64); - free ensures (memory_load64_le(mem, 69000bv64) == 1872bv64); + free ensures (memory_load64_le(mem, 69600bv64) == 69652bv64); + free ensures (memory_load64_le(mem, 69616bv64) == 1876bv64); + free ensures (memory_load64_le(mem, 69640bv64) == 69640bv64); implementation main() { From 4586ce26329779734ba8312be164e5178f1af168 Mon Sep 17 00:00:00 2001 From: l-kent <56100168+l-kent@users.noreply.github.com> Date: Wed, 13 Nov 2024 11:53:50 +1000 Subject: [PATCH 104/104] MemoryLoad as Statement (#269) * make MemoryLoad a type of Statement, some general cleanup * consolidate constant propagation variations * fix renamed parser * fix deprecated override (this is still inelegant but it will have to do) * add labels to MemoryLoad * make DSA behaviour consistent with previous * fix issues --------- Co-authored-by: l-kent --- src/main/antlr4/{Semantics.g4 => ASLp.g4} | 2 +- src/main/scala/analysis/ANR.scala | 38 ++- src/main/scala/analysis/Analysis.scala | 183 +----------- .../scala/analysis/BasicIRConstProp.scala | 75 ----- .../scala/analysis/ConstantPropagation.scala | 192 ++++++++++++ .../scala/analysis/GlobalRegionAnalysis.scala | 30 +- .../analysis/InterLiveVarsAnalysis.scala | 54 ++-- .../InterprocSteensgaardAnalysis.scala | 36 ++- .../analysis/IntraLiveVarsAnalysis.scala | 23 +- .../scala/analysis/MemoryRegionAnalysis.scala | 71 +++-- src/main/scala/analysis/RNA.scala | 43 ++- .../ReachingDefinitionsAnalysis.scala | 30 +- src/main/scala/analysis/ReachingDefs.scala | 8 +- .../scala/analysis/RegToMemAnalysis.scala | 72 ----- src/main/scala/analysis/RegionInjector.scala | 47 +-- .../scala/analysis/SummaryGenerator.scala 
| 46 ++- src/main/scala/analysis/TaintAnalysis.scala | 66 ++--- src/main/scala/analysis/UtilMethods.scala | 27 +- src/main/scala/analysis/VSA.scala | 65 ++-- .../analysis/VariableDependencyAnalysis.scala | 52 ++-- src/main/scala/analysis/WriteToAnalysis.scala | 27 +- .../data_structure_analysis/Graph.scala | 80 ++--- .../data_structure_analysis/LocalPhase.scala | 69 ++--- .../SymbolicAddressAnalysis.scala | 14 +- .../data_structure_analysis/Utility.scala | 1 - .../scala/analysis/solvers/IDESolver.scala | 67 +++-- src/main/scala/bap/BAPExpr.scala | 107 +------ src/main/scala/bap/BAPProgram.scala | 11 +- src/main/scala/bap/BAPStatement.scala | 14 +- src/main/scala/ir/Expr.scala | 23 -- src/main/scala/ir/Interpreter.scala | 36 ++- src/main/scala/ir/Statement.scala | 43 ++- src/main/scala/ir/Visitor.scala | 53 ++-- src/main/scala/ir/cilvisitor/CILVisitor.scala | 39 ++- src/main/scala/translating/BAPToIR.scala | 168 ++++++++++- ...emanticsLoader.scala => GTIRBLoader.scala} | 280 ++++++++++-------- src/main/scala/translating/GTIRBToIR.scala | 14 +- src/main/scala/translating/ILtoIL.scala | 34 +-- src/main/scala/translating/IRToBoogie.scala | 33 ++- src/main/scala/util/RunUtils.scala | 96 +++--- .../scala/DataStructureAnalysisTest.scala | 38 +-- src/test/scala/LiveVarsAnalysisTests.scala | 46 +-- src/test/scala/PointsToTest.scala | 28 +- src/test/scala/TaintAnalysisTests.scala | 22 +- src/test/scala/ir/CILVisitorTest.scala | 10 +- src/test/scala/ir/IRTest.scala | 42 +-- src/test/scala/ir/SingleCallInvariant.scala | 18 +- 47 files changed, 1268 insertions(+), 1305 deletions(-) rename src/main/antlr4/{Semantics.g4 => ASLp.g4} (99%) delete mode 100644 src/main/scala/analysis/BasicIRConstProp.scala create mode 100644 src/main/scala/analysis/ConstantPropagation.scala delete mode 100644 src/main/scala/analysis/RegToMemAnalysis.scala rename src/main/scala/translating/{SemanticsLoader.scala => GTIRBLoader.scala} (68%) diff --git a/src/main/antlr4/Semantics.g4 b/src/main/antlr4/ASLp.g4 similarity index 99% rename from src/main/antlr4/Semantics.g4 rename to src/main/antlr4/ASLp.g4 index 821111783..372f4ced5 100644 --- a/src/main/antlr4/Semantics.g4 +++ b/src/main/antlr4/ASLp.g4 @@ -1,4 +1,4 @@ -grammar Semantics; +grammar ASLp; // See aslp/libASL/asl.ott for reference grammar Bap-ali-plugin/asli_lifer.ml may also be useful for // visitors diff --git a/src/main/scala/analysis/ANR.scala b/src/main/scala/analysis/ANR.scala index 196ef8634..c318bbaee 100644 --- a/src/main/scala/analysis/ANR.scala +++ b/src/main/scala/analysis/ANR.scala @@ -7,7 +7,7 @@ import scala.collection.immutable /** * Calculates the set of variables that are not read after being written up to that point in the program. - * Useful for detecting dead stores, constants and if what variables are passed as parameters in a function call. + * Useful for detecting dead stores, constants and which variables are passed as parameters in a function call. */ trait ANRAnalysis(program: Program) { @@ -26,35 +26,41 @@ trait ANRAnalysis(program: Program) { /** Default implementation of eval. 
*/ def eval(cmd: Command, s: Set[Variable]): Set[Variable] = { - var m = s cmd match { case assume: Assume => - m.diff(assume.body.variables) + s.diff(assume.body.variables) case assert: Assert => - m.diff(assert.body.variables) - case memoryAssign: MemoryAssign => - m.diff(memoryAssign.index.variables) + s.diff(assert.body.variables) + case memoryStore: MemoryStore => + s.diff(memoryStore.index.variables) case indirectCall: IndirectCall => - m - indirectCall.target - case assign: Assign => - m = m.diff(assign.rhs.variables) - if ignoreRegions.contains(assign.lhs) then m else m + assign.lhs + s - indirectCall.target + case assign: LocalAssign => + val m = s.diff(assign.rhs.variables) + if (ignoreRegions.contains(assign.lhs)) { + m + } else { + m + assign.lhs + } + case memoryLoad: MemoryLoad => + val m = s.diff(memoryLoad.index.variables) + if (ignoreRegions.contains(memoryLoad.lhs)) { + m + } else { + m + memoryLoad.lhs + } case _ => - m + s } } /** Transfer function for state lattice elements. */ - def localTransfer(n: CFGPosition, s: Set[Variable]): Set[Variable] = n match { + def transfer(n: CFGPosition, s: Set[Variable]): Set[Variable] = n match { case cmd: Command => eval(cmd, s) case _ => s // ignore other kinds of nodes } - - /** Transfer function for state lattice elements. - */ - def transfer(n: CFGPosition, s: Set[Variable]): Set[Variable] = localTransfer(n, s) } class ANRAnalysisSolver(program: Program) extends ANRAnalysis(program) diff --git a/src/main/scala/analysis/Analysis.scala b/src/main/scala/analysis/Analysis.scala index 969bbc2e3..9a77d4032 100644 --- a/src/main/scala/analysis/Analysis.scala +++ b/src/main/scala/analysis/Analysis.scala @@ -1,14 +1,5 @@ package analysis -import ir.* -import analysis.solvers.* - -import scala.collection.mutable.{ArrayBuffer, HashMap, ListBuffer} -import java.io.{File, PrintWriter} -import scala.collection.mutable -import scala.collection.immutable -import util.Logger - /** Trait for program analyses. * * @tparam R @@ -18,176 +9,4 @@ trait Analysis[+R]: /** Performs the analysis and returns the result. */ - def analyze(): R - -/** Base class for value analysis with simple (non-lifted) lattice. - */ -trait ConstantPropagation(val program: Program) { - /** The lattice of abstract states. - */ - - val valuelattice: ConstantPropagationLattice = ConstantPropagationLattice() - - val statelattice: MapLattice[Variable, FlatElement[BitVecLiteral], ConstantPropagationLattice] = MapLattice(valuelattice) - - /** Default implementation of eval. 
- */ - def eval(exp: Expr, env: Map[Variable, FlatElement[BitVecLiteral]]): FlatElement[BitVecLiteral] = { - import valuelattice._ - exp match { - case id: Variable => env(id) - case n: BitVecLiteral => bv(n) - case ze: ZeroExtend => zero_extend(ze.extension, eval(ze.body, env)) - case se: SignExtend => sign_extend(se.extension, eval(se.body, env)) - case e: Extract => extract(e.end, e.start, eval(e.body, env)) - case bin: BinaryExpr => - val left = eval(bin.arg1, env) - val right = eval(bin.arg2, env) - bin.op match { - case BVADD => bvadd(left, right) - case BVSUB => bvsub(left, right) - case BVMUL => bvmul(left, right) - case BVUDIV => bvudiv(left, right) - case BVSDIV => bvsdiv(left, right) - case BVSREM => bvsrem(left, right) - case BVUREM => bvurem(left, right) - case BVSMOD => bvsmod(left, right) - case BVAND => bvand(left, right) - case BVOR => bvor(left, right) - case BVXOR => bvxor(left, right) - case BVNAND => bvnand(left, right) - case BVNOR => bvnor(left, right) - case BVXNOR => bvxnor(left, right) - case BVSHL => bvshl(left, right) - case BVLSHR => bvlshr(left, right) - case BVASHR => bvashr(left, right) - case BVCOMP => bvcomp(left, right) - case BVCONCAT => concat(left, right) - } - case un: UnaryExpr => - val arg = eval(un.arg, env) - un.op match { - case BVNOT => bvnot(arg) - case BVNEG => bvneg(arg) - } - case _ => valuelattice.top - } - } - - - /** Transfer function for state lattice elements. - */ - def localTransfer(n: CFGPosition, s: Map[Variable, FlatElement[BitVecLiteral]]): Map[Variable, FlatElement[BitVecLiteral]] = { - n match { - // assignments - case la: Assign => - s + (la.lhs -> eval(la.rhs, s)) - // all others: like no-ops - case _ => s - } - } - - /** The analysis lattice. - */ - val lattice: MapLattice[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]], MapLattice[Variable, FlatElement[BitVecLiteral], ConstantPropagationLattice]] = MapLattice(statelattice) - - val domain: Set[CFGPosition] = Set.empty ++ program - - /** Transfer function for state lattice elements. (Same as `localTransfer` for simple value analysis.) - */ - def transfer(n: CFGPosition, s: Map[Variable, FlatElement[BitVecLiteral]]): Map[Variable, FlatElement[BitVecLiteral]] = localTransfer(n, s) -} - -class ConstantPropagationSolver(program: Program) extends ConstantPropagation(program) - with SimplePushDownWorklistFixpointSolver[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]], MapLattice[Variable, FlatElement[BitVecLiteral], ConstantPropagationLattice]] - with IRInterproceduralForwardDependencies - with Analysis[Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]]] - - -/** Base class for value analysis with simple (non-lifted) lattice. - */ -trait ConstantPropagationWithSSA(val program: Program, val reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]) { - /** The lattice of abstract states. - */ - - val valuelattice: ConstantPropagationLatticeWithSSA = ConstantPropagationLatticeWithSSA() - - val statelattice: MapLattice[RegisterWrapperEqualSets, Set[BitVecLiteral], ConstantPropagationLatticeWithSSA] = MapLattice(valuelattice) - - /** Default implementation of eval. 
- */ - def eval(exp: Expr, env: Map[RegisterWrapperEqualSets, Set[BitVecLiteral]], n: CFGPosition): Set[BitVecLiteral] = { - import valuelattice._ - exp match { - case id: Variable => env(RegisterWrapperEqualSets(id, getUse(id, n, reachingDefs))) - case n: BitVecLiteral => bv(n) - case ze: ZeroExtend => zero_extend(ze.extension, eval(ze.body, env, n)) - case se: SignExtend => sign_extend(se.extension, eval(se.body, env, n)) - case e: Extract => extract(e.end, e.start, eval(e.body, env, n)) - case bin: BinaryExpr => - val left = eval(bin.arg1, env, n) - val right = eval(bin.arg2, env, n) - bin.op match { - case BVADD => bvadd(left, right) - case BVSUB => bvsub(left, right) - case BVMUL => bvmul(left, right) - case BVUDIV => bvudiv(left, right) - case BVSDIV => bvsdiv(left, right) - case BVSREM => bvsrem(left, right) - case BVUREM => bvurem(left, right) - case BVSMOD => bvsmod(left, right) - case BVAND => bvand(left, right) - case BVOR => bvor(left, right) - case BVXOR => bvxor(left, right) - case BVNAND => bvnand(left, right) - case BVNOR => bvnor(left, right) - case BVXNOR => bvxnor(left, right) - case BVSHL => bvshl(left, right) - case BVLSHR => bvlshr(left, right) - case BVASHR => bvashr(left, right) - case BVCOMP => bvcomp(left, right) - case BVCONCAT => concat(left, right) - } - - case un: UnaryExpr => - val arg = eval(un.arg, env, n) - un.op match { - case BVNOT => bvnot(arg) - case BVNEG => bvneg(arg) - } - - case _ => Set.empty - } - } - - /** Transfer function for state lattice elements. - */ - def localTransfer(n: CFGPosition, s: Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]): Map[RegisterWrapperEqualSets, Set[BitVecLiteral]] = - n match { - case a: Assign => - val lhsWrappers = s.collect { - case (k, v) if RegisterVariableWrapper(k.variable, k.assigns) == RegisterVariableWrapper(a.lhs, getDefinition(a.lhs, a, reachingDefs)) => (k, v) - } - if (lhsWrappers.nonEmpty) { - s ++ lhsWrappers.map((k, v) => (k, v.union(eval(a.rhs, s, a)))) - } else { - s + (RegisterWrapperEqualSets(a.lhs, getDefinition(a.lhs, a, reachingDefs)) -> eval(a.rhs, s, n)) - } - case _ => s - } - - /** The analysis lattice. - */ - val lattice: MapLattice[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]], MapLattice[RegisterWrapperEqualSets, Set[BitVecLiteral], ConstantPropagationLatticeWithSSA]] = MapLattice(statelattice) - - val domain: Set[CFGPosition] = Set.empty ++ program - - /** Transfer function for state lattice elements. (Same as `localTransfer` for simple value analysis.) 
- */ - def transfer(n: CFGPosition, s: Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]): Map[RegisterWrapperEqualSets, Set[BitVecLiteral]] = localTransfer(n, s) -} - -class ConstantPropagationSolverWithSSA(program: Program, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]) extends ConstantPropagationWithSSA(program, reachingDefs) - with SimplePushDownWorklistFixpointSolver[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]], MapLattice[RegisterWrapperEqualSets, Set[BitVecLiteral], ConstantPropagationLatticeWithSSA]] - with IRInterproceduralForwardDependencies - with Analysis[Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]]] + def analyze(): R \ No newline at end of file diff --git a/src/main/scala/analysis/BasicIRConstProp.scala b/src/main/scala/analysis/BasicIRConstProp.scala deleted file mode 100644 index 36a6d72d8..000000000 --- a/src/main/scala/analysis/BasicIRConstProp.scala +++ /dev/null @@ -1,75 +0,0 @@ -package analysis -import ir.* -import analysis.solvers.* - -trait ILValueAnalysisMisc: - val valuelattice: ConstantPropagationLattice = ConstantPropagationLattice() - val statelattice: MapLattice[Variable, FlatElement[BitVecLiteral], ConstantPropagationLattice] = MapLattice(valuelattice) - - def eval(exp: Expr, env: Map[Variable, FlatElement[BitVecLiteral]]): FlatElement[BitVecLiteral] = - import valuelattice._ - exp match - case id: Variable => env(id) - case n: BitVecLiteral => bv(n) - case ze: ZeroExtend => zero_extend(ze.extension, eval(ze.body, env)) - case se: SignExtend => sign_extend(se.extension, eval(se.body, env)) - case e: Extract => extract(e.end, e.start, eval(e.body, env)) - case bin: BinaryExpr => - val left = eval(bin.arg1, env) - val right = eval(bin.arg2, env) - bin.op match - case BVADD => bvadd(left, right) - case BVSUB => bvsub(left, right) - case BVMUL => bvmul(left, right) - case BVUDIV => bvudiv(left, right) - case BVSDIV => bvsdiv(left, right) - case BVSREM => bvsrem(left, right) - case BVUREM => bvurem(left, right) - case BVSMOD => bvsmod(left, right) - case BVAND => bvand(left, right) - case BVOR => bvor(left, right) - case BVXOR => bvxor(left, right) - case BVNAND => bvnand(left, right) - case BVNOR => bvnor(left, right) - case BVXNOR => bvxnor(left, right) - case BVSHL => bvshl(left, right) - case BVLSHR => bvlshr(left, right) - case BVASHR => bvashr(left, right) - case BVCOMP => bvcomp(left, right) - case BVCONCAT => concat(left, right) - - case un: UnaryExpr => - val arg = eval(un.arg, env) - - un.op match - case BVNOT => bvnot(arg) - case BVNEG => bvneg(arg) - - case _ => valuelattice.top - - private final val callerPreservedRegisters = Set("R0", "R1", "R2", "R3", "R4", "R5", "R6", "R7", "R8", "R9", "R10", - "R11", "R12", "R13", "R14", "R15", "R16", "R17", "R18", "R30") - - /** Transfer function for state lattice elements. 
- */ - def localTransfer(n: CFGPosition, s: Map[Variable, FlatElement[BitVecLiteral]]): Map[Variable, FlatElement[BitVecLiteral]] = - n match - case la: Assign => - s + (la.lhs -> eval(la.rhs, s)) - case c: Call => s ++ callerPreservedRegisters.filter(reg => s.keys.exists(_.name == reg)).map(n => Register(n, 64) -> statelattice.sublattice.top).toMap - case _ => s - - - -object IRSimpleValueAnalysis: - - class Solver(prog: Program) extends ILValueAnalysisMisc - with IRIntraproceduralForwardDependencies - with Analysis[Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]]] - with SimplePushDownWorklistFixpointSolver[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]], MapLattice[Variable, FlatElement[BitVecLiteral], ConstantPropagationLattice]]: - /* Worklist initial set */ - //override val lattice: MapLattice[CFGPosition, statelattice.type] = MapLattice(statelattice) - override val lattice: MapLattice[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]], MapLattice[Variable, FlatElement[BitVecLiteral], ConstantPropagationLattice]] = MapLattice(statelattice) - - override val domain: Set[CFGPosition] = computeDomain(IntraProcIRCursor, prog.procedures).toSet - def transfer(n: CFGPosition, s: Map[Variable, FlatElement[BitVecLiteral]]): Map[Variable, FlatElement[BitVecLiteral]] = localTransfer(n, s) diff --git a/src/main/scala/analysis/ConstantPropagation.scala b/src/main/scala/analysis/ConstantPropagation.scala new file mode 100644 index 000000000..a5f6d2d00 --- /dev/null +++ b/src/main/scala/analysis/ConstantPropagation.scala @@ -0,0 +1,192 @@ +package analysis +import ir.* +import analysis.solvers.* + +trait ConstantPropagation { + val valuelattice: ConstantPropagationLattice = ConstantPropagationLattice() + val statelattice: MapLattice[Variable, FlatElement[BitVecLiteral], ConstantPropagationLattice] = MapLattice(valuelattice) + val lattice: MapLattice[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]], MapLattice[Variable, FlatElement[BitVecLiteral], ConstantPropagationLattice]] = MapLattice(statelattice) + + def eval(exp: Expr, env: Map[Variable, FlatElement[BitVecLiteral]]): FlatElement[BitVecLiteral] = { + import valuelattice.* + exp match { + case id: Variable => env(id) + case n: BitVecLiteral => bv(n) + case ze: ZeroExtend => zero_extend(ze.extension, eval(ze.body, env)) + case se: SignExtend => sign_extend(se.extension, eval(se.body, env)) + case e: Extract => extract(e.end, e.start, eval(e.body, env)) + case bin: BinaryExpr => + val left = eval(bin.arg1, env) + val right = eval(bin.arg2, env) + bin.op match { + case BVADD => bvadd(left, right) + case BVSUB => bvsub(left, right) + case BVMUL => bvmul(left, right) + case BVUDIV => bvudiv(left, right) + case BVSDIV => bvsdiv(left, right) + case BVSREM => bvsrem(left, right) + case BVUREM => bvurem(left, right) + case BVSMOD => bvsmod(left, right) + case BVAND => bvand(left, right) + case BVOR => bvor(left, right) + case BVXOR => bvxor(left, right) + case BVNAND => bvnand(left, right) + case BVNOR => bvnor(left, right) + case BVXNOR => bvxnor(left, right) + case BVSHL => bvshl(left, right) + case BVLSHR => bvlshr(left, right) + case BVASHR => bvashr(left, right) + case BVCOMP => bvcomp(left, right) + case BVCONCAT => concat(left, right) + } + case un: UnaryExpr => + val arg = eval(un.arg, env) + un.op match { + case BVNOT => bvnot(arg) + case BVNEG => bvneg(arg) + } + case _ => valuelattice.top + } + } +} + +class IntraProcConstantPropagation(prog: Program) extends ConstantPropagation +with 
IRIntraproceduralForwardDependencies +with Analysis[Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]]] +with SimplePushDownWorklistFixpointSolver[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]], MapLattice[Variable, FlatElement[BitVecLiteral], ConstantPropagationLattice]] { + override val domain: Set[CFGPosition] = computeDomain(IntraProcIRCursor, prog.procedures).toSet + + private final val callerPreservedRegisters: Set[Variable] = Set("R0", "R1", "R2", "R3", "R4", "R5", "R6", "R7", "R8", "R9", "R10", + "R11", "R12", "R13", "R14", "R15", "R16", "R17", "R18", "R30").map(n => Register(n, 64)) + + def transfer(n: CFGPosition, s: Map[Variable, FlatElement[BitVecLiteral]]): Map[Variable, FlatElement[BitVecLiteral]] = { + n match { + case la: LocalAssign => + s + (la.lhs -> eval(la.rhs, s)) + case l: MemoryLoad => + s + (l.lhs -> valuelattice.top) + case _: Call => s.map { (k, v) => + if (callerPreservedRegisters.contains(k)) { + (k, valuelattice.top) + } else { + (k, v) + } + } + case _ => s + } + } +} + +class InterProcConstantPropagation(val program: Program) extends ConstantPropagation +with SimplePushDownWorklistFixpointSolver[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]], MapLattice[Variable, FlatElement[BitVecLiteral], ConstantPropagationLattice]] +with IRInterproceduralForwardDependencies +with Analysis[Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]]] { + + def transfer(n: CFGPosition, s: Map[Variable, FlatElement[BitVecLiteral]]): Map[Variable, FlatElement[BitVecLiteral]] = { + n match { + // assignments + case la: LocalAssign => + s + (la.lhs -> eval(la.rhs, s)) + case load: MemoryLoad => + s + (load.lhs -> valuelattice.top) + // all others: like no-ops + case _ => s + } + } + + override val domain: Set[CFGPosition] = Set.empty ++ program +} + +/** Base class for value analysis with simple (non-lifted) lattice. + */ +trait ConstantPropagationWithSSA(val program: Program, val reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]) { + /** The lattice of abstract states. + */ + + val valuelattice: ConstantPropagationLatticeWithSSA = ConstantPropagationLatticeWithSSA() + + val statelattice: MapLattice[RegisterWrapperEqualSets, Set[BitVecLiteral], ConstantPropagationLatticeWithSSA] = MapLattice(valuelattice) + + /** Default implementation of eval. 
+ */ + def eval(exp: Expr, env: Map[RegisterWrapperEqualSets, Set[BitVecLiteral]], n: CFGPosition): Set[BitVecLiteral] = { + import valuelattice.* + exp match { + case id: Variable => env(RegisterWrapperEqualSets(id, getUse(id, n, reachingDefs))) + case n: BitVecLiteral => bv(n) + case ze: ZeroExtend => zero_extend(ze.extension, eval(ze.body, env, n)) + case se: SignExtend => sign_extend(se.extension, eval(se.body, env, n)) + case e: Extract => extract(e.end, e.start, eval(e.body, env, n)) + case bin: BinaryExpr => + val left = eval(bin.arg1, env, n) + val right = eval(bin.arg2, env, n) + bin.op match { + case BVADD => bvadd(left, right) + case BVSUB => bvsub(left, right) + case BVMUL => bvmul(left, right) + case BVUDIV => bvudiv(left, right) + case BVSDIV => bvsdiv(left, right) + case BVSREM => bvsrem(left, right) + case BVUREM => bvurem(left, right) + case BVSMOD => bvsmod(left, right) + case BVAND => bvand(left, right) + case BVOR => bvor(left, right) + case BVXOR => bvxor(left, right) + case BVNAND => bvnand(left, right) + case BVNOR => bvnor(left, right) + case BVXNOR => bvxnor(left, right) + case BVSHL => bvshl(left, right) + case BVLSHR => bvlshr(left, right) + case BVASHR => bvashr(left, right) + case BVCOMP => bvcomp(left, right) + case BVCONCAT => concat(left, right) + } + + case un: UnaryExpr => + val arg = eval(un.arg, env, n) + un.op match { + case BVNOT => bvnot(arg) + case BVNEG => bvneg(arg) + } + + case _ => Set.empty + } + } + + /** Transfer function for state lattice elements. + */ + def transfer(n: CFGPosition, s: Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]): Map[RegisterWrapperEqualSets, Set[BitVecLiteral]] = + n match { + case a: LocalAssign => + val lhsWrappers = s.collect { + case (k, v) if RegisterVariableWrapper(k.variable, k.assigns) == RegisterVariableWrapper(a.lhs, getDefinition(a.lhs, a, reachingDefs)) => (k, v) + } + if (lhsWrappers.nonEmpty) { + s ++ lhsWrappers.map((k, v) => (k, v.union(eval(a.rhs, s, a)))) + } else { + s + (RegisterWrapperEqualSets(a.lhs, getDefinition(a.lhs, a, reachingDefs)) -> eval(a.rhs, s, n)) + } + case l: MemoryLoad => + val lhsWrappers = s.collect { + case (k, v) if RegisterVariableWrapper(k.variable, k.assigns) == RegisterVariableWrapper(l.lhs, getDefinition(l.lhs, l, reachingDefs)) => (k, v) + } + if (lhsWrappers.nonEmpty) { + s ++ lhsWrappers + } else { + s + (RegisterWrapperEqualSets(l.lhs, getDefinition(l.lhs, l, reachingDefs)) -> Set()) + } + + case _ => s + } + + /** The analysis lattice. 
+ */ + val lattice: MapLattice[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]], MapLattice[RegisterWrapperEqualSets, Set[BitVecLiteral], ConstantPropagationLatticeWithSSA]] = MapLattice(statelattice) + + val domain: Set[CFGPosition] = Set.empty ++ program +} + +class ConstantPropagationSolverWithSSA(program: Program, reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]) extends ConstantPropagationWithSSA(program, reachingDefs) + with SimplePushDownWorklistFixpointSolver[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]], MapLattice[RegisterWrapperEqualSets, Set[BitVecLiteral], ConstantPropagationLatticeWithSSA]] + with IRInterproceduralForwardDependencies + with Analysis[Map[CFGPosition, Map[RegisterWrapperEqualSets, Set[BitVecLiteral]]]] diff --git a/src/main/scala/analysis/GlobalRegionAnalysis.scala b/src/main/scala/analysis/GlobalRegionAnalysis.scala index 47b6b8ddb..2157b4d80 100644 --- a/src/main/scala/analysis/GlobalRegionAnalysis.scala +++ b/src/main/scala/analysis/GlobalRegionAnalysis.scala @@ -88,7 +88,6 @@ trait GlobalRegionAnalysis(val program: Program, } else { Set() } - case _: MemoryLoad => ??? case _: UninterpretedFunction => Set.empty case variable: Variable => val ctx = getUse(variable, n, reachingDefs) @@ -169,16 +168,13 @@ trait GlobalRegionAnalysis(val program: Program, */ def localTransfer(n: CFGPosition, s: Set[DataRegion]): Set[DataRegion] = { n match { - case memAssign: MemoryAssign => - checkIfDefined(evalMemLoadToGlobal(memAssign.index, memAssign.size, memAssign), n) - case assign: Assign => - val unwrapped = unwrapExpr(assign.rhs) - if (unwrapped.isDefined) { - checkIfDefined(evalMemLoadToGlobal(unwrapped.get.index, unwrapped.get.size, assign, loadOp = true), n) - } else { - // this is a constant but we need to check if it is a data region - checkIfDefined(evalMemLoadToGlobal(assign.rhs, 1, assign), n) - } + case store: MemoryStore => + checkIfDefined(evalMemLoadToGlobal(store.index, store.size, store), n) + case load: MemoryLoad => + checkIfDefined(evalMemLoadToGlobal(load.index, load.size, load, loadOp = true), n) + case assign: LocalAssign => + // this is a constant but we need to check if it is a data region + checkIfDefined(evalMemLoadToGlobal(assign.rhs, 1, assign), n) case _ => Set() } @@ -188,12 +184,12 @@ trait GlobalRegionAnalysis(val program: Program, } class GlobalRegionAnalysisSolver( - program: Program, - domain: Set[CFGPosition], - constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], - mmm: MemoryModelMap, - vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] + program: Program, + domain: Set[CFGPosition], + constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], + mmm: MemoryModelMap, + vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] ) extends GlobalRegionAnalysis(program, domain, constantProp, reachingDefs, mmm, vsaResult) with IRIntraproceduralForwardDependencies with Analysis[Map[CFGPosition, Set[DataRegion]]] diff --git a/src/main/scala/analysis/InterLiveVarsAnalysis.scala b/src/main/scala/analysis/InterLiveVarsAnalysis.scala index cbe3076c6..ef9173a65 100644 --- a/src/main/scala/analysis/InterLiveVarsAnalysis.scala +++ b/src/main/scala/analysis/InterLiveVarsAnalysis.scala @@ -1,7 +1,7 @@ 
package analysis import analysis.solvers.BackwardIDESolver -import ir.{Assert, Assume, Block, GoTo, CFGPosition, Command, DirectCall, IndirectCall, Assign, MemoryAssign, Unreachable, Return, Procedure, Program, Variable, toShortString} +import ir.{Assert, LocalAssign, Assume, CFGPosition, Command, DirectCall, IndirectCall, MemoryLoad, MemoryStore, Procedure, Program, Return, Variable} /** * Micro-transfer-functions for LiveVar analysis @@ -28,54 +28,68 @@ trait LiveVarsAnalysisFunctions extends BackwardIDEAnalysis[Variable, TwoElement } def edgesCallToAfterCall(call: Command, aftercall: DirectCall)(d: DL): Map[DL, EdgeFunction[TwoElement]] = { - d match - case Left(value) => Map() // maps all variables before the call to bottom + d match { + case Left(_) => Map() // maps all variables before the call to bottom case Right(_) => Map(d -> IdEdge()) + } } def edgesOther(n: CFGPosition)(d: DL): Map[DL, EdgeFunction[TwoElement]] = { - n match - case Assign(variable, expr, _) => // (s - variable) ++ expr.variables - d match + n match { + case LocalAssign(variable, expr, _) => // (s - variable) ++ expr.variables + d match { case Left(value) => if value == variable then Map() else Map(d -> IdEdge()) - - case Right(_) => expr.variables.foldLeft(Map[DL, EdgeFunction[TwoElement]](d -> IdEdge())) { + case Right(_) => + expr.variables.foldLeft(Map[DL, EdgeFunction[TwoElement]](d -> IdEdge())) { + (mp, expVar) => mp + (Left(expVar) -> ConstEdge(TwoElementTop)) + } + } + case MemoryLoad(lhs, _, index, _, _, _) => + d match { + case Left(value) => + if value == lhs then + Map() + else + Map(d -> IdEdge()) + case Right(_) => index.variables.foldLeft(Map[DL, EdgeFunction[TwoElement]](d -> IdEdge())) { (mp, expVar) => mp + (Left(expVar) -> ConstEdge(TwoElementTop)) } - - case MemoryAssign(_, index, value, _, _, _) => // s ++ store.index.variables ++ store.value.variables - d match - case Left(value) => Map(d -> IdEdge()) + } + case MemoryStore(_, index, value, _, _, _) => // s ++ store.index.variables ++ store.value.variables + d match { + case Left(_) => Map(d -> IdEdge()) case Right(_) => (index.variables ++ value.variables).foldLeft(Map[DL, EdgeFunction[TwoElement]](d -> IdEdge())) { (mp, storVar) => mp + (Left(storVar) -> ConstEdge(TwoElementTop)) } - + } case Assume(expr, _, _, _) => // s ++ expr.variables - d match - case Left(value) => Map(d -> IdEdge()) + d match { + case Left(_) => Map(d -> IdEdge()) case Right(_) => expr.variables.foldLeft(Map(d -> IdEdge()): Map[DL, EdgeFunction[TwoElement]]) { (mp, expVar) => mp + (Left(expVar) -> ConstEdge(TwoElementTop)) } - + } case Assert(expr, _, _) => // s ++ expr.variables - d match - case Left(value) => Map(d -> IdEdge()) + d match { + case Left(_) => Map(d -> IdEdge()) case Right(_) => expr.variables.foldLeft(Map[DL, EdgeFunction[TwoElement]](d -> IdEdge())) { (mp, expVar) => mp + (Left(expVar) -> ConstEdge(TwoElementTop)) } + } case IndirectCall(variable, _) => - d match + d match { case Left(value) => if value != variable then Map(d -> IdEdge()) else Map() case Right(_) => Map(d -> IdEdge(), Left(variable) -> ConstEdge(TwoElementTop)) + } case _ => Map(d -> IdEdge()) - + } } } diff --git a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala index c05f83d60..ea34505f1 100644 --- a/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala +++ b/src/main/scala/analysis/InterprocSteensgaardAnalysis.scala @@ -28,10 +28,10 @@ case class RegisterWrapperEqualSets(variable: Variable, assigns: 
Set[Assign]) * expression node in the AST. It is implemented using [[analysis.solvers.UnionFindSolver]]. */ class InterprocSteensgaardAnalysis( - domain: Set[CFGPosition], - mmm: MemoryModelMap, - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], - vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]) extends Analysis[Any] { + domain: Set[CFGPosition], + mmm: MemoryModelMap, + reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], + vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]]) extends Analysis[Any] { val solver: UnionFindSolver[StTerm] = UnionFindSolver() @@ -80,7 +80,8 @@ class InterprocSteensgaardAnalysis( val alloc = mmm.nodeToRegion(directCall).head val defs = getDefinition(mallocVariable, directCall, reachingDefs) unify(IdentifierVariable(RegisterWrapperEqualSets(mallocVariable, defs)), PointerRef(AllocVariable(alloc))) - case assign: Assign => + case assign: LocalAssign => + // TODO: unsound val unwrapped = unwrapExprToVar(assign.rhs) if (unwrapped.isDefined) { // X1 = X2: [[X1]] = [[X2]] @@ -97,11 +98,11 @@ class InterprocSteensgaardAnalysis( } unify(IdentifierVariable(RegisterWrapperEqualSets(X1, getDefinition(X1, assign, reachingDefs))), alpha) } - case memoryAssign: MemoryAssign => + case memoryStore: MemoryStore => // *X1 = X2: [[X1]] = ↑a ^ [[X2]] = a where a is a fresh term variable val X1_star = mmm.nodeToRegion(node) - // TODO: This is risky as it tries to coerce every value to a region (needed for functionpointer example) - val unwrapped = unwrapExprToVar(memoryAssign.value) + // TODO: This is not sound + val unwrapped = unwrapExprToVar(memoryStore.value) if (unwrapped.isDefined) { val X2 = unwrapped.get val X2_regions: Set[MemoryRegion] = vsaApproximation(X2, node) @@ -115,6 +116,25 @@ class InterprocSteensgaardAnalysis( unify(ExpressionVariable(x), alpha) } } + case memoryLoad: MemoryLoad => + // TODO: unsound + val unwrapped = unwrapExprToVar(memoryLoad.index) + if (unwrapped.isDefined) { + // X1 = X2: [[X1]] = [[X2]] + val X1 = memoryLoad.lhs + val X2 = unwrapped.get + unify(IdentifierVariable(RegisterWrapperEqualSets(X1, getDefinition(X1, memoryLoad, reachingDefs))), IdentifierVariable(RegisterWrapperEqualSets(X2, getUse(X2, memoryLoad, reachingDefs)))) + } else { + // X1 = *X2: [[X2]] = ↑a ^ [[X1]] = a where a is a fresh term variable + val X1 = memoryLoad.lhs + val X2_star = mmm.nodeToRegion(node) + val alpha = FreshVariable() + X2_star.foreach { x => + unify(PointerRef(alpha), ExpressionVariable(x)) + } + unify(IdentifierVariable(RegisterWrapperEqualSets(X1, getDefinition(X1, memoryLoad, reachingDefs))), alpha) + } + case _ => // do nothing TODO: Maybe LocalVar too? 
} } diff --git a/src/main/scala/analysis/IntraLiveVarsAnalysis.scala b/src/main/scala/analysis/IntraLiveVarsAnalysis.scala index a576b27fb..1271aa720 100644 --- a/src/main/scala/analysis/IntraLiveVarsAnalysis.scala +++ b/src/main/scala/analysis/IntraLiveVarsAnalysis.scala @@ -1,28 +1,31 @@ package analysis import analysis.solvers.SimpleWorklistFixpointSolver -import ir.{Assert, Assume, Block, CFGPosition, Call, DirectCall, GoTo, IndirectCall, Jump, Assign, MemoryAssign, NOP, Procedure, Program, Statement, Variable, Return, Unreachable} +import ir.{Assert, Assume, Block, CFGPosition, Call, DirectCall, GoTo, IndirectCall, Jump, LocalAssign, MemoryLoad, MemoryStore, Procedure, Program, Statement, Variable, Return, Unreachable} -abstract class LivenessAnalysis(program: Program) extends Analysis[Any]: +abstract class LivenessAnalysis(program: Program) extends Analysis[Any] { val lattice: MapLattice[CFGPosition, Set[Variable], PowersetLattice[Variable]] = MapLattice(PowersetLattice()) val domain: Set[CFGPosition] = Set.empty ++ program def transfer(n: CFGPosition, s: Set[Variable]): Set[Variable] = { n match { - case p: Procedure => s - case b: Block => s - case Assign(variable, expr, _) => (s - variable) ++ expr.variables - case MemoryAssign(_, index, value, _, _, _) => s ++ index.variables ++ value.variables + case _: Procedure => s + case _: Block => s + case LocalAssign(variable, expr, _) => (s - variable) ++ expr.variables + case MemoryStore(_, index, value, _, _, _) => s ++ index.variables ++ value.variables + case MemoryLoad(lhs, _, index, _, _, _) => (s - lhs) ++ index.variables case Assume(expr, _, _, _) => s ++ expr.variables case Assert(expr, _, _) => s ++ expr.variables case IndirectCall(variable, _) => s + variable - case c: DirectCall => s - case g: GoTo => s - case r: Return => s - case r: Unreachable => s + case _: DirectCall => s + case _: GoTo => s + case _: Return => s + case _: Unreachable => s case _ => ??? 
} } +} + class IntraLiveVarsAnalysis(program: Program) extends LivenessAnalysis(program) diff --git a/src/main/scala/analysis/MemoryRegionAnalysis.scala b/src/main/scala/analysis/MemoryRegionAnalysis.scala index 5f8e07560..d40957842 100644 --- a/src/main/scala/analysis/MemoryRegionAnalysis.scala +++ b/src/main/scala/analysis/MemoryRegionAnalysis.scala @@ -1,6 +1,6 @@ package analysis -import analysis.BitVectorEval.isNegative +import analysis.BitVectorEval.bv2SignedInt import analysis.solvers.SimpleWorklistFixpointSolver import ir.* import util.Logger @@ -62,7 +62,7 @@ trait MemoryRegionAnalysis(val program: Program, Logger.debug("Stack detection") Logger.debug(spList) stmt match { - case assign: Assign => + case assign: LocalAssign => if (spList.contains(assign.rhs)) { // add lhs to spList spList.addOne(assign.lhs) @@ -104,10 +104,11 @@ trait MemoryRegionAnalysis(val program: Program, evaluateExpression(binExpr.arg2, constantProp(n)) match { case Some(b: BitVecLiteral) => val ctx = getUse(variable, n, reachingDefs) - for { - i <- ctx - stackRegion <- eval(i.rhs, Set.empty, i, subAccess) - } yield { + val stackRegions = ctx.flatMap { + case l: LocalAssign => eval(l.rhs, l, subAccess) + case m: MemoryLoad => eval(m.index, m, m.size) + } + for (stackRegion <- stackRegions) yield { val nextOffset = bitVectorOpToBigIntOp(binExpr.op, stackRegion.start, b.value) poolMaster(nextOffset, IRWalk.procedure(n), subAccess) } @@ -115,7 +116,7 @@ trait MemoryRegionAnalysis(val program: Program, Set() } case _ => - eval(binExpr, Set.empty, n, subAccess) + eval(binExpr, n, subAccess) } reducedRegions } @@ -126,7 +127,10 @@ trait MemoryRegionAnalysis(val program: Program, // TODO: nicer way to deal with loops (a variable is being incremented in a loop) val regions = ctx.flatMap { i => if (i != n) { - eval(i.rhs, Set.empty, i, subAccess) + i match { + case l: LocalAssign => eval(l.rhs, l, subAccess) + case m: MemoryLoad => eval(m.index, m, m.size) + } } else { Set() } @@ -134,7 +138,7 @@ trait MemoryRegionAnalysis(val program: Program, regions } - def eval(exp: Expr, env: Set[StackRegion], n: Command, subAccess: BigInt): Set[StackRegion] = { + def eval(exp: Expr, n: Command, subAccess: BigInt): Set[StackRegion] = { if (graResult(n).nonEmpty) { Set.empty // skip global memory regions } else { @@ -143,7 +147,7 @@ trait MemoryRegionAnalysis(val program: Program, if (spList.contains(binOp.arg1)) { evaluateExpression(binOp.arg2, constantProp(n)) match { case Some(b: BitVecLiteral) => - val negB = if isNegative(b) then b.value - BigInt(2).pow(b.size) else b.value + val negB = bv2SignedInt(b) Set(poolMaster(negB, IRWalk.procedure(n), subAccess)) case None => Set.empty } @@ -161,12 +165,10 @@ trait MemoryRegionAnalysis(val program: Program, case variable: Variable => evaluateExpression(variable, constantProp(n)) match { case Some(b: BitVecLiteral) => - eval(b, env, n, subAccess) + eval(b, n, subAccess) case _ => reducibleVariable(variable, n, subAccess) } - case memoryLoad: MemoryLoad => - eval(memoryLoad.index, env, n, memoryLoad.size) // ignore case where it could be a global region (loaded later in MMM from relf) case _: BitVecLiteral => Set.empty @@ -202,7 +204,7 @@ trait MemoryRegionAnalysis(val program: Program, if (directCall.target.name == "malloc") { evaluateExpression(mallocVariable, constantProp(n)) match { case Some(b: BitVecLiteral) => - val negB = if isNegative(b) then b.value - BigInt(2).pow(b.size) else b.value + val negB = bv2SignedInt(b) val (name, start) = nextMallocCount(negB) val newHeapRegion = 
HeapRegion(name, start, negB, IRWalk.procedure(n)) addReturnHeap(directCall, newHeapRegion) @@ -218,25 +220,18 @@ trait MemoryRegionAnalysis(val program: Program, } else { s } - case memAssign: MemoryAssign => - val result = eval(memAssign.index, s, memAssign, memAssign.size) + case memAssign: MemoryStore => + val result = eval(memAssign.index, memAssign, memAssign.size) // if (result.size > 1) { // //throw new Exception(s"Memory load resulted in multiple regions ${result} for mem load $memoryLoad") // addMergableRegions(result) // } result - case assign: Assign => + case assign: LocalAssign => stackDetection(assign) - val unwrapped = unwrapExpr(assign.rhs) - if (unwrapped.isDefined) { - eval(unwrapped.get.index, s, assign, unwrapped.get.size) - // if (result.size > 1) { - // //throw new Exception(s"Memory load resulted in multiple regions ${result} for mem load $memoryLoad") - // addMergableRegions(result) - // } - } else { - Set() - } + Set() + case load: MemoryLoad => + eval(load.index, load, load.size) case _ => s } @@ -244,17 +239,17 @@ trait MemoryRegionAnalysis(val program: Program, } class MemoryRegionAnalysisSolver( - program: Program, - domain: Set[CFGPosition], - globals: Map[BigInt, String], - globalOffsets: Map[BigInt, BigInt], - subroutines: Map[BigInt, String], - constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - ANRResult: Map[CFGPosition, Set[Variable]], - RNAResult: Map[CFGPosition, Set[Variable]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], - graResult: Map[CFGPosition, Set[DataRegion]], - mmm: MemoryModelMap + program: Program, + domain: Set[CFGPosition], + globals: Map[BigInt, String], + globalOffsets: Map[BigInt, BigInt], + subroutines: Map[BigInt, String], + constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + ANRResult: Map[CFGPosition, Set[Variable]], + RNAResult: Map[CFGPosition, Set[Variable]], + reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], + graResult: Map[CFGPosition, Set[DataRegion]], + mmm: MemoryModelMap ) extends MemoryRegionAnalysis(program, domain, globals, globalOffsets, subroutines, constantProp, ANRResult, RNAResult, reachingDefs, graResult, mmm) with IRIntraproceduralForwardDependencies with Analysis[Map[CFGPosition, Set[StackRegion]]] diff --git a/src/main/scala/analysis/RNA.scala b/src/main/scala/analysis/RNA.scala index b58e74dd9..e15dbe914 100644 --- a/src/main/scala/analysis/RNA.scala +++ b/src/main/scala/analysis/RNA.scala @@ -22,47 +22,44 @@ trait RNAAnalysis(program: Program) { private val linkRegister = Register("R30", 64) private val framePointer = Register("R29", 64) - private val ignoreRegions: Set[Expr] = Set(linkRegister, framePointer, stackPointer) + private val ignoreRegions: Set[Variable] = Set(linkRegister, framePointer, stackPointer) - /** Default implementation of eval. 
- */ def eval(cmd: Command, s: Set[Variable]): Set[Variable] = { - var m = s cmd match { case assume: Assume => - m.union(assume.body.variables.filter(!ignoreRegions.contains(_))) + s ++ (assume.body.variables -- ignoreRegions) case assert: Assert => - m.union(assert.body.variables.filter(!ignoreRegions.contains(_))) - case memoryAssign: MemoryAssign => - m.union(memoryAssign.index.variables.filter(!ignoreRegions.contains(_))) + s ++ (assert.body.variables -- ignoreRegions) + case memoryStore: MemoryStore => + s ++ (memoryStore.index.variables -- ignoreRegions) case indirectCall: IndirectCall => - if (ignoreRegions.contains(indirectCall.target)) return m - m + indirectCall.target - case assign: Assign => - m = m - assign.lhs - m.union(assign.rhs.variables.filter(!ignoreRegions.contains(_))) + if (ignoreRegions.contains(indirectCall.target)) { + s + } else { + s + indirectCall.target + } + case assign: LocalAssign => + val m = s - assign.lhs + m ++ (assign.rhs.variables -- ignoreRegions) + case memoryLoad: MemoryLoad => + val m = s - memoryLoad.lhs + m ++ (memoryLoad.index.variables -- ignoreRegions) case _ => - m + s } } /** Transfer function for state lattice elements. */ - def localTransfer(n: CFGPosition, s: Set[Variable]): Set[Variable] = n match { + def transfer(n: CFGPosition, s: Set[Variable]): Set[Variable] = n match { case cmd: Command => eval(cmd, s) case _ => s // ignore other kinds of nodes } - /** Transfer function for state lattice elements. - */ - def transfer(n: CFGPosition, s: Set[Variable]): Set[Variable] = localTransfer(n, s) } -class RNAAnalysisSolver( - program: Program, -) extends RNAAnalysis(program) +class RNAAnalysisSolver(program: Program) extends RNAAnalysis(program) with IRIntraproceduralBackwardDependencies with Analysis[Map[CFGPosition, Set[Variable]]] - with SimpleWorklistFixpointSolver[CFGPosition, Set[Variable], PowersetLattice[Variable]] { -} \ No newline at end of file + with SimpleWorklistFixpointSolver[CFGPosition, Set[Variable], PowersetLattice[Variable]] \ No newline at end of file diff --git a/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala b/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala index 93aa4ad6e..07fe443d2 100644 --- a/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala +++ b/src/main/scala/analysis/ReachingDefinitionsAnalysis.scala @@ -21,16 +21,12 @@ trait ReachingDefinitionsAnalysis(program: Program) { val domain: Set[CFGPosition] = Set.empty ++ program - def transfer(n: CFGPosition, s: (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])): (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]]) = - localTransfer(n, s) - - def localTransfer( - n: CFGPosition, - s: (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]]) - ): (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]]) = n match { - case cmd: Command => - eval(cmd, s) - case _ => s + def transfer(n: CFGPosition, s: (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])): (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]]) = { + n match { + case cmd: Command => + eval(cmd, s) + case _ => s + } } private def transformUses(vars: Set[Variable], s: (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])): (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]]) = { @@ -42,7 +38,7 @@ trait ReachingDefinitionsAnalysis(program: Program) { def eval(cmd: Command, s: (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])): (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]]) = cmd match { - case assign: Assign => + case assign: LocalAssign => 
// do the rhs first (should reset the values for this node to the empty set) // for each variable in the rhs, find the definitions from the lattice lhs and add them to the lattice rhs // for lhs, addOrReplace the definition @@ -55,8 +51,16 @@ trait ReachingDefinitionsAnalysis(program: Program) { (s(0) + (lhs -> Set(assign)), rhsUseDefs) case assert: Assert => transformUses(assert.body.variables, s) - case memoryAssign: MemoryAssign => - transformUses(memoryAssign.index.variables ++ memoryAssign.value.variables, s) + case memoryStore: MemoryStore => + transformUses(memoryStore.index.variables ++ memoryStore.value.variables, s) + case memoryLoad: MemoryLoad => + val lhs = memoryLoad.lhs + val rhs = memoryLoad.index.variables + val rhsUseDefs: Map[Variable, Set[Assign]] = rhs.foldLeft(Map.empty[Variable, Set[Assign]]) { + case (acc, v) => + acc + (v -> s(0)(v)) + } + (s(0) + (lhs -> Set(memoryLoad)), rhsUseDefs) case assume: Assume => transformUses(assume.body.variables, s) case indirectCall: IndirectCall => diff --git a/src/main/scala/analysis/ReachingDefs.scala b/src/main/scala/analysis/ReachingDefs.scala index 5a2bb2f2a..f60e9ffc6 100644 --- a/src/main/scala/analysis/ReachingDefs.scala +++ b/src/main/scala/analysis/ReachingDefs.scala @@ -1,7 +1,7 @@ package analysis import analysis.solvers.SimplePushDownWorklistFixpointSolver -import ir.{Assert, Assume, BitVecType, CFGPosition, Call, DirectCall, Expr, GoTo, IndirectCall, InterProcIRCursor, IntraProcIRCursor, Assign, MemoryAssign, NOP, Procedure, Program, Register, Variable, computeDomain} +import ir.{LocalAssign, CFGPosition, DirectCall, IntraProcIRCursor, MemoryLoad, Procedure, Program, Register, Variable, computeDomain} abstract class ReachingDefs(program: Program, writesTo: Map[Procedure, Set[Register]]) extends Analysis[Map[CFGPosition, Map[Variable, Set[CFGPosition]]]] { @@ -11,8 +11,10 @@ abstract class ReachingDefs(program: Program, writesTo: Map[Procedure, Set[Regis def transfer(n: CFGPosition, s: Map[Variable, Set[CFGPosition]]): Map[Variable, Set[CFGPosition]] = { n match { - case loc: Assign => + case loc: LocalAssign => s + (loc.lhs -> Set(n)) + case load: MemoryLoad => + s + (load.lhs -> Set(n)) case DirectCall(target, _) if target.name == "malloc" => s + (mallocRegister -> Set(n)) case DirectCall(target, _) if writesTo.contains(target) => @@ -27,6 +29,6 @@ abstract class ReachingDefs(program: Program, writesTo: Map[Procedure, Set[Regis } -class ReachingDefsAnalysis(program: Program, writesTo: Map[Procedure, Set[Register]]) extends ReachingDefs(program, writesTo), IRIntraproceduralForwardDependencies, +class ReachingDefsAnalysis(program: Program, writesTo: Map[Procedure, Set[Register]]) extends ReachingDefs(program, writesTo), IRIntraproceduralForwardDependencies, SimplePushDownWorklistFixpointSolver[CFGPosition, Map[Variable, Set[CFGPosition]], MapLattice[Variable, Set[CFGPosition], PowersetLattice[CFGPosition]]] diff --git a/src/main/scala/analysis/RegToMemAnalysis.scala b/src/main/scala/analysis/RegToMemAnalysis.scala deleted file mode 100644 index 1afde6f4d..000000000 --- a/src/main/scala/analysis/RegToMemAnalysis.scala +++ /dev/null @@ -1,72 +0,0 @@ -package analysis - -import ir.{MemoryLoad, *} -import analysis.solvers.* -import util.Logger - -import scala.collection.immutable - -/** - * Collects all the memory loads and the expressions that are assigned to a register but cannot be evaluated. 
- * - * Tracks: - * R_x = MemoryLoad[Base + Offset] - * R_x = Base + Offset - * - * Both in which constant propagation mark as TOP which is not useful. - */ -trait RegionAccessesAnalysis(program: Program, constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])]) { - - val mapLattice: MapLattice[RegisterVariableWrapper, FlatElement[Expr], FlatLattice[Expr]] = MapLattice(FlatLattice[_root_.ir.Expr]()) - - val lattice: MapLattice[CFGPosition, Map[RegisterVariableWrapper, FlatElement[Expr]], MapLattice[RegisterVariableWrapper, FlatElement[Expr], FlatLattice[Expr]]] = MapLattice(mapLattice) - - val domain: Set[CFGPosition] = program.toSet - - val first: Set[CFGPosition] = program.procedures.toSet - - /** Default implementation of eval. - */ - def eval(cmd: Statement, constants: Map[Variable, FlatElement[BitVecLiteral]], s: Map[RegisterVariableWrapper, FlatElement[Expr]]): Map[RegisterVariableWrapper, FlatElement[Expr]] = { - cmd match { - case assign: Assign => - assign.rhs match { - case memoryLoad: MemoryLoad => - s + (RegisterVariableWrapper(assign.lhs, getDefinition(assign.lhs, cmd, reachingDefs)) -> FlatEl(memoryLoad)) - case binaryExpr: BinaryExpr => - if (evaluateExpression(binaryExpr.arg1, constants).isEmpty) { // approximates Base + Offset - Logger.debug(s"Approximating $assign in $binaryExpr") - Logger.debug(s"Reaching defs: ${reachingDefs(cmd)}") - s + (RegisterVariableWrapper(assign.lhs, getDefinition(assign.lhs, cmd, reachingDefs)) -> FlatEl(binaryExpr)) - } else { - s - } - case _ => s - } - case _ => - s - } - } - - /** Transfer function for state lattice elements. - */ - def localTransfer(n: CFGPosition, s: Map[RegisterVariableWrapper, FlatElement[Expr]]): Map[RegisterVariableWrapper, FlatElement[Expr]] = n match { - case cmd: Statement => - eval(cmd, constantProp(cmd), s) - case _ => s // ignore other kinds of nodes - } - - /** Transfer function for state lattice elements. 
- */ - def transfer(n: CFGPosition, s: Map[RegisterVariableWrapper, FlatElement[Expr]]): Map[RegisterVariableWrapper, FlatElement[Expr]] = localTransfer(n, s) -} - -class RegionAccessesAnalysisSolver( - program: Program, - constantProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], - ) extends RegionAccessesAnalysis(program, constantProp, reachingDefs) - with IRInterproceduralForwardDependencies - with Analysis[Map[CFGPosition, Map[RegisterVariableWrapper, FlatElement[Expr]]]] - with SimpleWorklistFixpointSolver[CFGPosition, Map[RegisterVariableWrapper, FlatElement[Expr]], MapLattice[RegisterVariableWrapper, FlatElement[Expr], FlatLattice[Expr]]] { -} diff --git a/src/main/scala/analysis/RegionInjector.scala b/src/main/scala/analysis/RegionInjector.scala index 3cc414f06..77f1bd89c 100644 --- a/src/main/scala/analysis/RegionInjector.scala +++ b/src/main/scala/analysis/RegionInjector.scala @@ -15,7 +15,6 @@ class MergedRegion(var name: String, val subregions: mutable.Set[MemoryRegion]) class RegionInjector(program: Program, mmm: MemoryModelMap) { private val accessToRegion = mutable.Map[Statement, Set[MemoryRegion]]() - private val loadToMemory = mutable.Map[Statement, Memory]() val mergedRegions: mutable.Map[MemoryRegion, MergedRegion] = mutable.Map() def nodeVisitor(): Unit = { @@ -87,13 +86,13 @@ class RegionInjector(program: Program, mmm: MemoryModelMap) { val mergedRegion = mergedRegions(regionsHead) access match { - case store: MemoryAssign => + case store: MemoryStore => val newMemory = replaceMemory(store.mem, regionsHead, mergedRegion) store.mem = newMemory + case load: MemoryLoad => + val newMemory = replaceMemory(load.mem, regionsHead, mergedRegion) + load.mem = newMemory case _ => - val newMemory = replaceMemory(loadToMemory(access), regionsHead, mergedRegion) - val renamer = RegionRenamer(newMemory) - renamer.visitStatement(access) } } @@ -115,43 +114,13 @@ class RegionInjector(program: Program, mmm: MemoryModelMap) { mmm.getStack(n) ++ mmm.getData(n) } - def visitExpr(expr: Expr, cmd: Statement): Unit = { - expr match { - case Extract(_, _, body) => - visitExpr(body, cmd) - case UninterpretedFunction(_, params, _) => - params.foreach { - p => visitExpr(p, cmd) - } - case Repeat(_, body) => - visitExpr(body, cmd) - case ZeroExtend(_, body) => - visitExpr(body, cmd) - case SignExtend(_, body) => - visitExpr(body, cmd) - case UnaryExpr(_, arg) => - visitExpr(arg, cmd) - case BinaryExpr(_, arg1, arg2) => - visitExpr(arg1, cmd) - visitExpr(arg2, cmd) - case m: MemoryLoad => - val regions = statementToRegions(cmd) - accessToRegion(cmd) = regions - loadToMemory(cmd) = m.mem - case _ => - } - } - def visitStatement(n: Statement): Unit = n match { - case assign: Assign => - visitExpr(assign.rhs, assign) - case m: MemoryAssign => + case m: MemoryStore => val regions = statementToRegions(m) accessToRegion(m) = regions - case assert: Assert => - visitExpr(assert.body, assert) - case assume: Assume => - visitExpr(assume.body, assume) + case m: MemoryLoad => + val regions = statementToRegions(n) + accessToRegion(n) = regions case _ => // ignore other kinds of nodes } diff --git a/src/main/scala/analysis/SummaryGenerator.scala b/src/main/scala/analysis/SummaryGenerator.scala index 18a5b4205..1a878e21d 100644 --- a/src/main/scala/analysis/SummaryGenerator.scala +++ b/src/main/scala/analysis/SummaryGenerator.scala @@ -24,35 +24,31 @@ private trait RNATaintableAnalysis( private val 
linkRegister = Register("R30", 64) private val framePointer = Register("R29", 64) - private val ignoreRegions: Set[Expr] = Set(linkRegister, framePointer, stackPointer) + private val ignoreRegions: Set[Variable] = Set(linkRegister, framePointer, stackPointer) def eval(cmd: Command, s: Set[Taintable]): Set[Taintable] = { - var m = s - val exprs = cmd match { + cmd match { case assume: Assume => - Set(assume.body) + s ++ assume.body.variables -- ignoreRegions case assert: Assert => - Set(assert.body) - case memoryAssign: MemoryAssign => - m = m -- getMemoryVariable(cmd, memoryAssign.mem, memoryAssign.index, memoryAssign.size, constProp, globals) - Set(memoryAssign.index, memoryAssign.value) + s ++ assert.body.variables -- ignoreRegions + case memoryStore: MemoryStore => + val m = s -- getMemoryVariable(cmd, memoryStore.mem, memoryStore.index, memoryStore.size, constProp, globals) + m ++ memoryStore.index.variables ++ memoryStore.value.variables -- ignoreRegions case indirectCall: IndirectCall => - if (ignoreRegions.contains(indirectCall.target)) return m - Set(indirectCall.target) - case assign: Assign => - m = m - assign.lhs - Set(assign.rhs) - case _ => return m - } - - exprs.foldLeft(m) { - (m, expr) => { - val vars = expr.variables.filter(!ignoreRegions.contains(_)).map { v => v: Taintable } - val memvars: Set[Taintable] = expr.loads.flatMap { - l => getMemoryVariable(cmd, l.mem, l.index, l.size, constProp, globals) + if (ignoreRegions.contains(indirectCall.target)) { + s + } else { + s + indirectCall.target -- ignoreRegions } - m.union(vars).union(memvars) - } + case assign: LocalAssign => + val m = s - assign.lhs + m ++ assign.rhs.variables -- ignoreRegions + case memoryLoad: MemoryLoad => + val m = s - memoryLoad.lhs + val memvar = getMemoryVariable(cmd, memoryLoad.mem, memoryLoad.index, memoryLoad.size, constProp, globals) + m ++ memvar ++ memoryLoad.index.variables -- ignoreRegions + case _ => s } } @@ -96,10 +92,10 @@ class SummaryGenerator( private def toGamma(variable: Taintable): Option[BExpr] = { variable match { case variable: Register => Some(variable.toGamma) - case variable: LocalVar => None + case _: LocalVar => None case variable: GlobalVariable => Some(variable.toGamma) //case variable: LocalStackVariable => None - case variable: UnknownMemory => Some(FalseBLiteral) + case _: UnknownMemory => Some(FalseBLiteral) } } diff --git a/src/main/scala/analysis/TaintAnalysis.scala b/src/main/scala/analysis/TaintAnalysis.scala index d51eda374..d18e27d48 100644 --- a/src/main/scala/analysis/TaintAnalysis.scala +++ b/src/main/scala/analysis/TaintAnalysis.scala @@ -15,7 +15,7 @@ type Taintable = Variable | GlobalVariable /*| LocalStackVariable*/ | UnknownMem /** * A global variable in memory. 
*/ -case class GlobalVariable(val mem: Memory, val address: BitVecLiteral, val size: Int, val identifier: String) { +case class GlobalVariable(mem: Memory, address: BitVecLiteral, size: Int, identifier: String) { override def toString(): String = { s"GlobalVariable($mem, $identifier, $size, $address)" } @@ -59,7 +59,10 @@ case class UnknownMemory() { } def getMemoryVariable( - n: CFGPosition, mem: Memory, expression: Expr, size: Int, + n: CFGPosition, + mem: Memory, + expression: Expr, + size: Int, constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], globals: Map[BigInt, String], ): Option[GlobalVariable/*| LocalStackVariable*/] = { @@ -69,25 +72,23 @@ def getMemoryVariable( expression match { // TODO assumes stack var accesses are all of the form R31 + n, or just R31, when in reality they could be more complex. - case BinaryExpr(BVADD, arg1, arg2) if arg1 == stackPointer => { + case BinaryExpr(BVADD, arg1, arg2) if arg1 == stackPointer => evaluateExpression(arg2, constProp(n)) match // TODO This assumes that all stack variables are initialized local variables, which is not necessarily the case. // If a stack address is read, without being assigned a value in this procedure, it will be // assumed untainted, when in reality it may be UnknownMemory. //case Some(addr) => Some(LocalStackVariable(addr, size)) - case Some(addr) => None + case Some(_) => None case None => None - } //case v: Variable if v == stackPointer => Some(LocalStackVariable(BitVecLiteral(0, 64), size)) case v: Variable if v == stackPointer => None - case _ => { + case _ => // TOOD check that the global access has the right size evaluateExpression(expression, constProp(n)) match case Some(addr) => globals.get(addr.value) match case Some(global) => Some(GlobalVariable(mem, addr, size, global)) case None => None case None => None - } } } @@ -100,8 +101,6 @@ trait TaintAnalysisFunctions( val edgelattice = EdgeFunctionLattice(valuelattice) import edgelattice.{IdEdge, ConstEdge} - private val stackPointer = Register("R31", 64) - def edgesCallToEntry(call: DirectCall, entry: Procedure)(d: DL): Map[DL, EdgeFunction[TwoElement]] = { Map(d -> IdEdge()) } @@ -115,39 +114,34 @@ trait TaintAnalysisFunctions( } def edgesOther(n: CFGPosition)(d: DL): Map[DL, EdgeFunction[TwoElement]] = { - def containsValue(expression: Expr, value: Taintable): Boolean = { - value match { - case (v: Variable) => expression.variables.contains(v) - case v => { - expression.loads.map { - load => getMemoryVariable(n, load.mem, load.index, load.size, constProp, globals).getOrElse(UnknownMemory()) - }.contains(v) - } - } - } - (n match { - case Assign(variable, expression, _) => { + case LocalAssign(variable, expression, _) => d match { - case Left(v) => { - if containsValue(expression, v) then Map(d -> IdEdge(), Left(variable) -> IdEdge()) - else if v == variable then Map() - else Map(d -> IdEdge()) - } + case Left(v: Variable) => + if (expression.variables.contains(v)) { + Map(d -> IdEdge(), Left(variable) -> IdEdge()) + } else if (v == variable) { + Map() + } else { + Map(d -> IdEdge()) + } case _ => Map(d -> IdEdge()) } - } - case MemoryAssign(mem, index, expression, _, size, _) => { - val variable = getMemoryVariable(n, mem, index, size, constProp, globals).getOrElse(UnknownMemory()) + case MemoryStore(mem, index, expression, _, size, _) => + val variable: Taintable = getMemoryVariable(n, mem, index, size, constProp, globals).getOrElse(UnknownMemory()) d match { - case Left(v) => { - if containsValue(expression, v) then Map(d -> IdEdge(), 
Left(variable) -> IdEdge()) - else if variable == v && v != UnknownMemory() then Map() - else Map(d -> IdEdge()) - } - case Right(_) => Map(d -> IdEdge()) + case Left(v: Variable) if expression.variables.contains(v) => Map(d -> IdEdge(), Left(variable) -> IdEdge()) + case Left(v: GlobalVariable) if variable == v => Map() + case _ => Map(d -> IdEdge()) + } + case MemoryLoad(lhs, mem, index, _, size, _) => + val memoryVariable: Taintable = getMemoryVariable(n, mem, index, size, constProp, globals).getOrElse(UnknownMemory()) + d match { + case Left(v: Variable) if index.variables.contains(v) => Map(d -> IdEdge(), Left(lhs) -> IdEdge()) + case Left(v: Variable) if v == lhs => Map() + case Left(v: Taintable) if memoryVariable == v => Map(d -> IdEdge(), Left(lhs) -> IdEdge()) + case _ => Map(d -> IdEdge()) } - } case _ => Map(d -> IdEdge()) }) ++ ( d match diff --git a/src/main/scala/analysis/UtilMethods.scala b/src/main/scala/analysis/UtilMethods.scala index 2f74b65e8..be32662f8 100644 --- a/src/main/scala/analysis/UtilMethods.scala +++ b/src/main/scala/analysis/UtilMethods.scala @@ -81,9 +81,7 @@ def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapper } def applySingle(op: BitVecLiteral => BitVecLiteral, a: Set[BitVecLiteral]): Set[BitVecLiteral] = { - val res = for { - x <- a - } yield op(x) + val res = for (x <- a) yield op(x) res } @@ -136,9 +134,8 @@ def evaluateExpressionWithSSA(exp: Expr, constantPropResult: Map[RegisterWrapper Logger.debug("getUse: " + getUse(variable, n, reachingDefs)) constantPropResult(RegisterWrapperEqualSets(variable, getUse(variable, n, reachingDefs))) case b: BitVecLiteral => Set(b) - case Repeat(repeats, body) => evaluateExpressionWithSSA(body, constantPropResult, n, reachingDefs) - case MemoryLoad(mem, index, endian, size) => Set.empty - case UninterpretedFunction(name, params, returnType) => Set.empty + case Repeat(_, body) => evaluateExpressionWithSSA(body, constantPropResult, n, reachingDefs) + case _: UninterpretedFunction => Set.empty case _ => throw RuntimeException("ERROR: CASE NOT HANDLED: " + exp + "\n") } } @@ -153,23 +150,6 @@ def getUse(variable: Variable, node: CFGPosition, reachingDefs: Map[CFGPosition, out.getOrElse(variable, Set()) } -def unwrapExpr(expr: Expr): Option[MemoryLoad] = { - expr match { - case e: Extract => unwrapExpr(e.body) - case e: SignExtend => unwrapExpr(e.body) - case e: ZeroExtend => unwrapExpr(e.body) - case repeat: Repeat => unwrapExpr(repeat.body) - case unaryExpr: UnaryExpr => unwrapExpr(unaryExpr.arg) - case binaryExpr: BinaryExpr => // TODO: incorrect - unwrapExpr(binaryExpr.arg1) - unwrapExpr(binaryExpr.arg2) - case memoryLoad: MemoryLoad => - Some(memoryLoad) - case _ => - None - } -} - def unwrapExprToVar(expr: Expr): Option[Variable] = { expr match { case variable: Variable => @@ -182,7 +162,6 @@ def unwrapExprToVar(expr: Expr): Option[Variable] = { case binaryExpr: BinaryExpr => // TODO: incorrect unwrapExprToVar(binaryExpr.arg1) unwrapExprToVar(binaryExpr.arg2) - case memoryLoad: MemoryLoad => unwrapExprToVar(memoryLoad.index) case _ => None } diff --git a/src/main/scala/analysis/VSA.scala b/src/main/scala/analysis/VSA.scala index 37d503238..4a9f11a14 100644 --- a/src/main/scala/analysis/VSA.scala +++ b/src/main/scala/analysis/VSA.scala @@ -10,8 +10,7 @@ import scala.collection.immutable import util.Logger /** ValueSets are PowerSet of possible values */ -trait Value { -} +trait Value case class AddressValue(region: MemoryRegion) extends Value { override def toString: String = "Address(" 
+ region + ")" @@ -43,26 +42,27 @@ trait ValueSetAnalysis(program: Program, /** Default implementation of eval. */ - def eval(cmd: Command, s: Map[Variable | MemoryRegion, Set[Value]], n: CFGPosition): Map[Variable | MemoryRegion, Set[Value]] = { - cmd match + def eval(cmd: Command, s: Map[Variable | MemoryRegion, Set[Value]]): Map[Variable | MemoryRegion, Set[Value]] = { + cmd match { case directCall: DirectCall if directCall.target.name == "malloc" => - val regions = mmm.nodeToRegion(n) + val regions = mmm.nodeToRegion(cmd) // malloc variable s + (mallocVariable -> regions.map(r => AddressValue(r))) - case localAssign: Assign => - val regions = mmm.nodeToRegion(n) + case localAssign: LocalAssign => + val regions = mmm.nodeToRegion(cmd) if (regions.nonEmpty) { s + (localAssign.lhs -> regions.map(r => AddressValue(r))) } else { - evaluateExpression(localAssign.rhs, constantProp(n)) match { + evaluateExpression(localAssign.rhs, constantProp(cmd)) match { case Some(bitVecLiteral: BitVecLiteral) => val possibleData = canCoerceIntoDataRegion(bitVecLiteral, 1) - if (possibleData.isDefined) { - s + (localAssign.lhs -> Set(AddressValue(possibleData.get))) - } else { - s + (localAssign.lhs -> Set(LiteralValue(bitVecLiteral))) - } + if (possibleData.isDefined) { + s + (localAssign.lhs -> Set(AddressValue(possibleData.get))) + } else { + s + (localAssign.lhs -> Set(LiteralValue(bitVecLiteral))) + } case None => + // TODO this is not at all sound val unwrapValue = unwrapExprToVar(localAssign.rhs) unwrapValue match { case Some(v: Variable) => @@ -73,33 +73,50 @@ trait ValueSetAnalysis(program: Program, } } } - case memAssign: MemoryAssign => - val regions = mmm.nodeToRegion(n) - evaluateExpression(memAssign.value, constantProp(n)) match { + case load: MemoryLoad => + val regions = mmm.nodeToRegion(cmd) + if (regions.nonEmpty) { + s + (load.lhs -> regions.map(r => AddressValue(r))) + } else { + // TODO this is blatantly incorrect but maintaining current functionality to start + val unwrapValue = unwrapExprToVar(load.index) + unwrapValue match { + case Some(v: Variable) => + s + (load.lhs -> s(v)) + case None => + Logger.debug(s"Too Complex: ${load.index}") // do nothing + s + } + } + case store: MemoryStore => + val regions = mmm.nodeToRegion(cmd) + evaluateExpression(store.value, constantProp(cmd)) match { case Some(bitVecLiteral: BitVecLiteral) => - val possibleData = canCoerceIntoDataRegion(bitVecLiteral, memAssign.size) + val possibleData = canCoerceIntoDataRegion(bitVecLiteral, store.size) if (possibleData.isDefined) { s ++ regions.map(r => r -> Set(AddressValue(possibleData.get))) } else { s ++ regions.map(r => r -> Set(LiteralValue(bitVecLiteral))) } case None => - val unwrapValue = unwrapExprToVar(memAssign.value) + // TODO: unsound + val unwrapValue = unwrapExprToVar(store.value) unwrapValue match { case Some(v: Variable) => s ++ regions.map(r => r -> s(v)) case None => - Logger.debug(s"Too Complex: $memAssign.value") // do nothing + Logger.debug(s"Too Complex: $store.value") // do nothing s } } case _ => s + } } - /** Transfer function for state lattice elements. + /** Transfer function for state lattice elements. (Same as `localTransfer` for simple value analysis.) 
*/ - def localTransfer(n: CFGPosition, s: Map[Variable | MemoryRegion, Set[Value]]): Map[Variable | MemoryRegion, Set[Value]] = { + def transferUnlifted(n: CFGPosition, s: Map[Variable | MemoryRegion, Set[Value]]): Map[Variable | MemoryRegion, Set[Value]] = { n match { case p: Procedure => mmm.pushContext(p.name) @@ -108,15 +125,11 @@ trait ValueSetAnalysis(program: Program, mmm.popContext() s case command: Command => - eval(command, s, n) + eval(command, s) case _ => s } } - - /** Transfer function for state lattice elements. (Same as `localTransfer` for simple value analysis.) - */ - def transferUnlifted(n: CFGPosition, s: Map[Variable | MemoryRegion, Set[Value]]): Map[Variable | MemoryRegion, Set[Value]] = localTransfer(n, s) } class ValueSetAnalysisSolver( diff --git a/src/main/scala/analysis/VariableDependencyAnalysis.scala b/src/main/scala/analysis/VariableDependencyAnalysis.scala index ed1ed1eaf..643bf5b6f 100644 --- a/src/main/scala/analysis/VariableDependencyAnalysis.scala +++ b/src/main/scala/analysis/VariableDependencyAnalysis.scala @@ -37,46 +37,50 @@ trait ProcVariableDependencyAnalysisFunctions( def edgesCallToAfterCall(call: DirectCall, aftercall: Command)(d: DL): Map[DL, EdgeFunction[Set[Taintable]]] = { d match { - case Left(v) => varDepsSummaries.get(call.target).flatMap(_.get(v).map( _.foldLeft(Map[DL, EdgeFunction[Set[Taintable]]]()) { - (m, d) => m + (Left(d) -> IdEdge()) - })).getOrElse(Map()) + case Left(v) => + varDepsSummaries.get(call.target).flatMap { + _.get(v).map { + _.foldLeft(Map[DL, EdgeFunction[Set[Taintable]]]()) { + (m, d) => m + (Left(d) -> IdEdge()) + } + } + }.getOrElse(Map()) case Right(_) => Map(d -> IdEdge()) } } def edgesOther(n: CFGPosition)(d: DL): Map[DL, EdgeFunction[Set[Taintable]]] = { - def getVars(expression: Expr): Set[Taintable] = { - expression.variables.map { v => v: Taintable } ++ - expression.loads.map { l => getMemoryVariable(n, l.mem, l.index, l.size, constProp, globals).getOrElse(UnknownMemory()) } - } - if n == procedure then d match { // At the start of the procedure, no variables should depend on anything but themselves. 
case Left(_) => Map() - case Right(_) => { + case Right(_) => variables.foldLeft(Map(d -> IdEdge())) { (m: Map[DL, EdgeFunction[Set[Taintable]]], v) => m + (Left(v) -> ConstEdge(Set(v))) } - } } else n match { - case Assign(assigned, expression, _) => { - val vars = getVars(expression) -- ignoredRegisters + case LocalAssign(assigned, expression, _) => + val vars = expression.variables -- ignoredRegisters d match { - case Left(v) if vars.contains(v) => Map(d -> IdEdge(), Left(assigned) -> IdEdge()) - case Left(v) if v == assigned => Map() + case Left(v: Variable) if vars.contains(v) => Map(d -> IdEdge(), Left(assigned) -> IdEdge()) + case Left(v: Variable) if v == assigned => Map() case _ => Map(d -> IdEdge()) } - } - case MemoryAssign(mem, index, expression, _, size, _) => { - val assigned = getMemoryVariable(n, mem, index, size, constProp, globals).getOrElse(UnknownMemory()) - - val vars = getVars(expression) -- ignoredRegisters + case MemoryStore(mem, index, expression, _, size, _) => + val assigned: Taintable = getMemoryVariable(n, mem, index, size, constProp, globals).getOrElse(UnknownMemory()) + val vars = expression.variables -- ignoredRegisters d match { - case Left(v) if vars.contains(v) => Map(d -> IdEdge(), Left(assigned) -> IdEdge()) - case Left(v) if v == assigned && v != UnknownMemory() => Map() + case Left(v: Variable) if vars.contains(v) => Map(d -> IdEdge(), Left(assigned) -> IdEdge()) + case Left(v: GlobalVariable) if v == assigned => Map() + case _ => Map(d -> IdEdge()) + } + case MemoryLoad(lhs, mem, index, _, size, _) => + val memoryVariable: Taintable = getMemoryVariable(n, mem, index, size, constProp, globals).getOrElse(UnknownMemory()) + val vars: Set[Taintable] = Set(memoryVariable) ++ index.variables -- ignoredRegisters + d match { + case Left(v) if vars.contains(v) => Map(d -> IdEdge(), Left(lhs) -> IdEdge()) + case Left(v) if v == lhs => Map() case _ => Map(d -> IdEdge()) } - } case _ => Map(d -> IdEdge()) } } @@ -97,7 +101,7 @@ class ProcVariableDependencyAnalysis( { override def phase2Init: Set[Taintable] = Set(Register("R0", 64)) - override val startNode: CFGPosition = procedure + override def start: CFGPosition = procedure } class VariableDependencyAnalysis( @@ -107,7 +111,7 @@ class VariableDependencyAnalysis( constProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], scc: mutable.ListBuffer[mutable.Set[Procedure]], ) { - val varDepVariables: Set[analysis.Taintable] = 0.to(28).map { n => + val varDepVariables: Set[Taintable] = 0.to(28).map { n => Register(s"R$n", 64) }.toSet ++ specGlobals.map { g => analysis.GlobalVariable(dsl.mem, BitVecLiteral(g.address, 64), g.size, g.name) diff --git a/src/main/scala/analysis/WriteToAnalysis.scala b/src/main/scala/analysis/WriteToAnalysis.scala index 9e3139297..cc4e3eba2 100644 --- a/src/main/scala/analysis/WriteToAnalysis.scala +++ b/src/main/scala/analysis/WriteToAnalysis.scala @@ -1,6 +1,6 @@ package analysis -import ir.{Assert, Assume, BitVecType, Call, DirectCall, GoTo, Assign, MemoryAssign, NOP, Procedure, Program, Register} +import ir.{DirectCall, LocalAssign, MemoryLoad, MemoryStore, Procedure, Program, Register} import scala.collection.mutable @@ -24,18 +24,19 @@ class WriteToAnalysis(program: Program) extends Analysis[Map[Procedure, Set[Regi writesTo(proc) else val writtenTo: mutable.Set[Register] = mutable.Set() - proc.blocks.foreach( - block => - block.statements.foreach { - case Assign(variable: Register, _, _) if paramRegisters.contains(variable) => - writtenTo.add(variable) - case 
DirectCall(target, _) if target.name == "malloc" => - writtenTo.add(mallocRegister) - case DirectCall(target, _) if program.procedures.contains(target) => - writtenTo.addAll(getWritesTos(target)) - case _ => - } - ) + proc.blocks.foreach { block => + block.statements.foreach { + case LocalAssign(variable: Register, _, _) if paramRegisters.contains(variable) => + writtenTo.add(variable) + case MemoryLoad(lhs: Register, _, _, _, _, _) if paramRegisters.contains(lhs) => + writtenTo.add(lhs) + case DirectCall(target, _) if target.name == "malloc" => + writtenTo.add(mallocRegister) + case DirectCall(target, _) if program.procedures.contains(target) => + writtenTo.addAll(getWritesTos(target)) + case _ => + } + } writesTo.update(proc, writtenTo.toSet) writesTo(proc) diff --git a/src/main/scala/analysis/data_structure_analysis/Graph.scala b/src/main/scala/analysis/data_structure_analysis/Graph.scala index 1829bbeb3..a7cae0af4 100644 --- a/src/main/scala/analysis/data_structure_analysis/Graph.scala +++ b/src/main/scala/analysis/data_structure_analysis/Graph.scala @@ -48,36 +48,37 @@ class Graph(val proc: Procedure, // collect all stack access and their maximum accessed size // BigInt is the offset of the stack position and Int is it's size - private val stackAccesses: Map[BigInt, Int] = computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).foldLeft(Map[BigInt, Int]()) { - (results, pos) => - pos match - case Assign(_, expr, _) => - expr match - case MemoryLoad(_, index, _, size) => - visitStackAccess(pos, index, size).foldLeft(results) { - (res, access) => - if !res.contains(access.offset) || (res.getOrElse(access.offset, -1) < access.size) then - res + (access.offset -> access.size) - else - res - } - case _ => - visitStackAccess(pos, expr, 0).foldLeft(results) { - (res, access) => - if !res.contains(access.offset) || (res.getOrElse(access.offset, -1) < access.size) then - res + (access.offset -> access.size) - else - res - } - case MemoryAssign(_, index, _, _, size, _) => - visitStackAccess(pos, index, size).foldLeft(results) { - (res, access) => - if !res.contains(access.offset) || (res.getOrElse(access.offset, -1) < access.size) then - res + (access.offset -> access.size) - else - res - } - case _ => results + private val stackAccesses: Map[BigInt, Int] = { + computeDomain(IntraProcIRCursor, Set(proc)).toSeq.sortBy(_.toShortString).foldLeft(Map[BigInt, Int]()) { + (results, pos) => + pos match { + case LocalAssign(_, expr, _) => + visitStackAccess(pos, expr, 0).foldLeft(results) { + (res, access) => + if !res.contains(access.offset) || (res.getOrElse(access.offset, -1) < access.size) then + res + (access.offset -> access.size) + else + res + } + case MemoryStore(_, index, _, _, size, _) => + visitStackAccess(pos, index, size).foldLeft(results) { + (res, access) => + if !res.contains(access.offset) || (res.getOrElse(access.offset, -1) < access.size) then + res + (access.offset -> access.size) + else + res + } + case MemoryLoad(_, _, index, _, size, _) => + visitStackAccess(pos, index, size).foldLeft(results) { + (res, access) => + if !res.contains(access.offset) || (res.getOrElse(access.offset, -1) < access.size) then + res + (access.offset -> access.size) + else + res + } + case _ => results + } + } } private case class StackAccess(offset: BigInt, size: Int) @@ -271,9 +272,9 @@ class Graph(val proc: Procedure, if (varName.startsWith("#")) { varName = s"LocalVar_${varName.drop(1)}" } - structs.append(DotStruct(s"SSA_${id}_${varName}", s"SSA_${pos}_${varName}", None, false)) + 
structs.append(DotStruct(s"SSA_${id}_$varName", s"SSA_${pos}_$varName", None, false)) val value = find(slice) - arrows.append(StructArrow(DotStructElement(s"SSA_${id}_${varName}", None), DotStructElement(value.node.id.toString, Some(value.cell.offset.toString)), value.internalOffset.toString)) + arrows.append(StructArrow(DotStructElement(s"SSA_${id}_$varName", None), DotStructElement(value.node.id.toString, Some(value.cell.offset.toString)), value.internalOffset.toString)) } } @@ -601,7 +602,7 @@ class Graph(val proc: Procedure, val varToCell = mutable.Map[CFGPosition, mutable.Map[Variable, Slice]]() val domain = computeDomain(IntraProcIRCursor, Set(proc)) domain.foreach { - case pos @ Assign(variable, value, _) => + case pos @ LocalAssign(variable, value, _) => value.variables.foreach { v => if (isFormal(pos, v)) { val node = Node(Some(this)) @@ -612,6 +613,17 @@ class Graph(val proc: Procedure, } val node = Node(Some(this)) varToCell(pos) = mutable.Map(variable -> Slice(node.cells(0), 0)) + case pos @ MemoryLoad(lhs, _, index, _, _, _) => + index.variables.foreach { v => + if (isFormal(pos, v)) { + val node = Node(Some(this)) + node.flags.incomplete = true + nodes.add(node) + formals.update(v, Slice(node.cells(0), 0)) + } + } + val node = Node(Some(this)) + varToCell(pos) = mutable.Map(lhs -> Slice(node.cells(0), 0)) case pos @ DirectCall(target, _) if target.name == "malloc" => val node = Node(Some(this)) varToCell(pos) = mutable.Map(mallocRegister -> Slice(node.cells(0), 0)) @@ -622,7 +634,7 @@ class Graph(val proc: Procedure, result(variable) = Slice(node.cells(0), 0) } varToCell(pos) = result - case pos @ MemoryAssign(_, _, expr, _, _, _) => + case pos @ MemoryStore(_, _, expr, _, _, _) => unwrapPaddingAndSlicing(expr) match { case value: Variable => if (isFormal(pos, value)) { diff --git a/src/main/scala/analysis/data_structure_analysis/LocalPhase.scala b/src/main/scala/analysis/data_structure_analysis/LocalPhase.scala index 9d9dac1f7..cd975bbb4 100644 --- a/src/main/scala/analysis/data_structure_analysis/LocalPhase.scala +++ b/src/main/scala/analysis/data_structure_analysis/LocalPhase.scala @@ -144,7 +144,7 @@ class LocalPhase(proc: Procedure, * Handles unification for instructions of the form R_x = R_y [+ offset] where R_y is a pointer and [+ offset] is optional * @param position the cfg position being visited (note this might be a local assign of the form R_x = R_y [+ offset] * or it might be memory load/store where the index is of the form R_y [+ offset] - * @param lhs Ev(R_x) if position is local assign or a cell from an empty node if R_y [+ offset] is the index of a memoryAssign + * @param lhs Ev(R_x) if position is local assign or a cell from an empty node if R_y [+ offset] is the index of a memoryStore * @param rhs R_y, reachingDefs(position)(R_y) can be used to find the set of SSA variables that may define R_x * @param pointee if false, the position is local pointer arithmetic therefore Ev(R_y [+ offset]) is merged with lhs * else, the position is a memory read/write therefore E(Ev(R_y [+ offset])) is merged with lhs @@ -245,11 +245,11 @@ class LocalPhase(proc: Procedure, val returnArgument = graph.varToCell(n)(variable) graph.mergeCells(graph.adjust(returnArgument), graph.adjust(slice)) } - case Assign(variable, rhs, _) => + case LocalAssign(variable, rhs, _) => val expr: Expr = unwrapPaddingAndSlicing(rhs) val lhsCell = graph.adjust(graph.varToCell(n)(variable)) - var global = isGlobal(rhs, n) - var stack = isStack(rhs, n) + val global = isGlobal(rhs, n) + val stack = 
isStack(rhs, n) if global.isDefined then // Rx = global address graph.mergeCells(lhsCell, global.get) else if stack.isDefined then // Rx = stack address @@ -271,38 +271,39 @@ class LocalPhase(proc: Procedure, // Rx = Ry merge corresponding cells to Rx and Ry case arg: Variable /*if varToSym.contains(n) && varToSym(n).contains(arg)*/ => visitPointerArithmeticOperation(n, lhsCell, arg, 0) - - case MemoryLoad(_, index, _, size) => // Rx = Mem[Ry], merge Rx and pointee of Ry (E(Ry)) - assert(size % 8 == 0) - val byteSize = size/8 - lhsCell.node.get.flags.read = true - global = isGlobal(index, n, byteSize) - stack = isStack(index, n) - if global.isDefined then - graph.mergeCells(lhsCell,graph.adjust(graph.find(global.get).getPointee)) - else if stack.isDefined then - graph.mergeCells(lhsCell, graph.adjust(graph.find(stack.get).getPointee)) - else - index match - case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) => - evaluateExpression(arg2, constProp(n)) match - case Some(v) => -// assert(varToSym(n).contains(arg1)) - val offset = v.value - visitPointerArithmeticOperation(n, lhsCell, arg1, byteSize, true, offset) - case None => -// assert(varToSym(n).contains(arg1)) - // collapse the result -// visitPointerArithmeticOperation(n, lhsCell, arg1, byteSize, true, 0, true) - unsupportedPointerArithmeticOperation(n, index,Node(Some(graph)).cells(0)) - case arg: Variable => -// assert(varToSym(n).contains(arg)) - visitPointerArithmeticOperation(n, lhsCell, arg, byteSize, true) - case _ => ??? case _ => unsupportedPointerArithmeticOperation(n, expr, lhsCell) - - case MemoryAssign(_, ind, expr, _, size, _) => + + case MemoryLoad(lhs, _, index, _, size, _) => // Rx = Mem[Ry], merge Rx and pointee of Ry (E(Ry)) + val indexUnwrapped = unwrapPaddingAndSlicing(index) + val lhsCell = graph.adjust(graph.varToCell(n)(lhs)) + assert(size % 8 == 0) + val byteSize = size / 8 + lhsCell.node.get.flags.read = true + val global = isGlobal(indexUnwrapped, n, byteSize) + val stack = isStack(indexUnwrapped, n) + if global.isDefined then + graph.mergeCells(lhsCell, graph.adjust(graph.find(global.get).getPointee)) + else if stack.isDefined then + graph.mergeCells(lhsCell, graph.adjust(graph.find(stack.get).getPointee)) + else + indexUnwrapped match + case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) => + evaluateExpression(arg2, constProp(n)) match + case Some(v) => + // assert(varToSym(n).contains(arg1)) + val offset = v.value + visitPointerArithmeticOperation(n, lhsCell, arg1, byteSize, true, offset) + case None => + // assert(varToSym(n).contains(arg1)) + // collapse the result + // visitPointerArithmeticOperation(n, lhsCell, arg1, byteSize, true, 0, true) + unsupportedPointerArithmeticOperation(n, indexUnwrapped, Node(Some(graph)).cells(0)) + case arg: Variable => + // assert(varToSym(n).contains(arg)) + visitPointerArithmeticOperation(n, lhsCell, arg, byteSize, true) + case _ => ??? 
+ case MemoryStore(_, ind, expr, _, size, _) => val unwrapped = unwrapPaddingAndSlicing(expr) unwrapped match { // Mem[Ry] = Rx diff --git a/src/main/scala/analysis/data_structure_analysis/SymbolicAddressAnalysis.scala b/src/main/scala/analysis/data_structure_analysis/SymbolicAddressAnalysis.scala index 52cb23639..a66b7f616 100644 --- a/src/main/scala/analysis/data_structure_analysis/SymbolicAddressAnalysis.scala +++ b/src/main/scala/analysis/data_structure_analysis/SymbolicAddressAnalysis.scala @@ -89,7 +89,7 @@ trait SymbolicAddressFunctions(constProp: Map[CFGPosition, Map[Variable, FlatEle def edgesOther(n: CFGPosition)(d: DL): Map[DL, EdgeFunction[TwoElement]] = n match - case Assign(variable, rhs, _) => + case LocalAssign(variable, rhs, _) => val expr = unwrapPaddingAndSlicing(rhs) expr match case BinaryExpr(op, arg1: Variable, arg2) if op.equals(BVADD) => @@ -115,7 +115,7 @@ trait SymbolicAddressFunctions(constProp: Map[CFGPosition, Map[Variable, FlatEle case Left(value) if value.accessor == variable => Map() case _ => Map(d -> IdEdge()) case None => Map(d -> IdEdge()) - case arg:Variable => + case arg: Variable => d match case Left(value) if value.accessor == arg => val result: Map[DL, EdgeFunction[TwoElement]] = Map(Left(SymbolicAddress(variable, value.symbolicBase, value.offset)) -> ConstEdge(TwoElementTop)) @@ -125,15 +125,15 @@ trait SymbolicAddressFunctions(constProp: Map[CFGPosition, Map[Variable, FlatEle result case Left(value) if value.accessor == variable => Map() case _ => Map(d -> IdEdge()) - case _: MemoryLoad => - d match - case Left(value) if value.accessor == variable => Map() - case Left(_) => Map(d -> IdEdge()) - case Right(_) => Map(d -> IdEdge(), Left(SymbolicAddress(variable, UnknownLocation(nextunknownCount, IRWalk.procedure(n)), 0)) -> ConstEdge(TwoElementTop)) case _ => d match case Left(value) if value.accessor == variable => Map() case _ => Map(d -> IdEdge()) + case MemoryLoad(lhs, _, _, _, _, _) => + d match + case Left(value) if value.accessor == lhs => Map() + case Left(_) => Map(d -> IdEdge()) + case Right(_) => Map(d -> IdEdge(), Left(SymbolicAddress(lhs, UnknownLocation(nextunknownCount, IRWalk.procedure(n)), 0)) -> ConstEdge(TwoElementTop)) case DirectCall(target, _) if target.name == "malloc" => d match case Left(value) if value.accessor == mallocVariable => Map() diff --git a/src/main/scala/analysis/data_structure_analysis/Utility.scala b/src/main/scala/analysis/data_structure_analysis/Utility.scala index 002c1ec12..6d89db0d7 100644 --- a/src/main/scala/analysis/data_structure_analysis/Utility.scala +++ b/src/main/scala/analysis/data_structure_analysis/Utility.scala @@ -246,7 +246,6 @@ def unwrapPaddingAndSlicing(expr: Expr): Expr = case SignExtend(extension, body) => SignExtend(extension, unwrapPaddingAndSlicing(body)) case UnaryExpr(op, arg) => UnaryExpr(op, arg) case BinaryExpr(op, arg1, arg2) => BinaryExpr(op, unwrapPaddingAndSlicing(arg1), unwrapPaddingAndSlicing(arg2)) - case MemoryLoad(mem, index, endian, size) => MemoryLoad(mem, unwrapPaddingAndSlicing(index), endian, size) case variable: Variable => variable case Extract(_, _, body) /*if start == 0 && end == 32*/ => unwrapPaddingAndSlicing(body) // this may make it unsound case ZeroExtend(_, body) => unwrapPaddingAndSlicing(body) diff --git a/src/main/scala/analysis/solvers/IDESolver.scala b/src/main/scala/analysis/solvers/IDESolver.scala index c057b98f0..4b60e2e16 100644 --- a/src/main/scala/analysis/solvers/IDESolver.scala +++ b/src/main/scala/analysis/solvers/IDESolver.scala @@ -24,7 
+24,8 @@ abstract class IDESolver[E <: Procedure | Command, EE <: Procedure | Command, C protected def isExit(exit: CFGPosition): Boolean protected def getAfterCalls(exit: EE): Set[R] - def phase2Init = valuelattice.top + def phase2Init: T = valuelattice.top + def start: CFGPosition = startNode /** * Phase 1 of the IDE algorithm. @@ -32,10 +33,10 @@ abstract class IDESolver[E <: Procedure | Command, EE <: Procedure | Command, C * The original version of the algorithm uses summary edges from call nodes to after-call nodes * instead of `callJumpCache` and `exitJumpCache`. */ - private class Phase1(val program: Program) extends InitializingPushDownWorklistFixpointSolver[(CFGPosition, DL, DL), EdgeFunction[T], EdgeFunctionLattice[T, L]] { + private class Phase1 extends InitializingPushDownWorklistFixpointSolver[(CFGPosition, DL, DL), EdgeFunction[T], EdgeFunctionLattice[T, L]] { val lattice: MapLattice[(CFGPosition, DL, DL), EdgeFunction[T], EdgeFunctionLattice[T, L]] = MapLattice(edgelattice) - val first: Set[(CFGPosition, DL, DL)] = Set((startNode, Right(Lambda()), Right(Lambda()))) + val first: Set[(CFGPosition, DL, DL)] = Set((start, Right(Lambda()), Right(Lambda()))) /** * callJumpCache(funentry, d1, call)(d3) returns the composition of the edges (call.funentry, d3) -> (call, *) -> (funentry, d1). @@ -77,7 +78,6 @@ abstract class IDESolver[E <: Procedure | Command, EE <: Procedure | Command, C } } - def process(n: (CFGPosition, DL, DL)): Unit = { val (position, d1, d2) = n val e1 = x(n) @@ -142,14 +142,14 @@ abstract class IDESolver[E <: Procedure | Command, EE <: Procedure | Command, C * Performs a forward dataflow analysis using the decomposed lattice and the micro-transformers. * The original RHS version of IDE uses jump functions for all nodes, not only at exits, but the analysis result and complexity is the same. */ - private class Phase2(val program: Program, val phase1: Phase1) extends InitializingPushDownWorklistFixpointSolver[(CFGPosition, DL), T, L]: + private class Phase2(val phase1: Phase1) extends InitializingPushDownWorklistFixpointSolver[(CFGPosition, DL), T, L] { val lattice: MapLattice[(CFGPosition, DL), T, L] = MapLattice(valuelattice) - val first: Set[(CFGPosition, DL)] = Set((startNode, Right(Lambda()))) + val first: Set[(CFGPosition, DL)] = Set((start, Right(Lambda()))) /** - * Function summaries from phase 1. - * Built when first invoked. - */ + * Function summaries from phase 1. + * Built when first invoked. + */ lazy val summaries: mutable.Map[Procedure, mutable.Map[DL, mutable.Map[DL, EdgeFunction[T]]]] = phase1.summaries() def init: T = phase2Init @@ -188,21 +188,23 @@ abstract class IDESolver[E <: Procedure | Command, EE <: Procedure | Command, C val restructuredlattice: MapLattice[CFGPosition, Map[D, T], MapLattice[D, T, L]] = MapLattice(MapLattice(valuelattice)) /** - * Restructures the analysis output to match `restructuredlattice`. - */ - def restructure(y: lattice.Element): restructuredlattice.Element = + * Restructures the analysis output to match `restructuredlattice`. 
+ */ + def restructure(y: lattice.Element): restructuredlattice.Element = { y.foldLeft(Map[CFGPosition, Map[D, valuelattice.Element]]()) { case (acc, ((n, dl), e)) => dl match { case Left(d) => acc + (n -> (acc.getOrElse(n, Map[D, valuelattice.Element]()) + (d -> e))) case _ => acc } } + } + } def analyze(): Map[CFGPosition, Map[D, T]] = { if (program.mainProcedure.blocks.nonEmpty && program.mainProcedure.returnBlock.isDefined && program.mainProcedure.entryBlock.isDefined) { - val phase1 = Phase1(program) + val phase1 = Phase1() phase1.analyze() - val phase2 = Phase2(program, phase1) + val phase2 = Phase2(phase1) phase2.restructure(phase2.analyze()) } else { Logger.warn(s"Disabling IDE solver tests due to external main procedure: ${program.mainProcedure.name}") @@ -224,7 +226,7 @@ abstract class ForwardIDESolver[D, T, L <: Lattice[T]](program: Program) protected def returnToCall(ret: Command): DirectCall = ret match { case ret: Statement => ret.parent.statements.getPrev(ret).asInstanceOf[DirectCall] - case r: Jump => ret.parent.statements.last.asInstanceOf[DirectCall] + case _: Jump => ret.parent.statements.last.asInstanceOf[DirectCall] } protected def getCallee(call: DirectCall): Procedure = { @@ -232,16 +234,20 @@ abstract class ForwardIDESolver[D, T, L <: Lattice[T]](program: Program) call.target } - protected def isCall(call: CFGPosition): Boolean = - call match - case directCall: DirectCall if (!directCall.successor.isInstanceOf[Unreachable] && directCall.target.returnBlock.isDefined && directCall.target.entryBlock.isDefined) => true + protected def isCall(call: CFGPosition): Boolean = { + call match { + case directCall: DirectCall if !directCall.successor.isInstanceOf[Unreachable] && directCall.target.returnBlock.isDefined && directCall.target.entryBlock.isDefined => true case _ => false + } + } - protected def isExit(exit: CFGPosition): Boolean = - exit match + protected def isExit(exit: CFGPosition): Boolean = { + exit match { // only looking at functions with statements - case command: Return => true + case _: Return => true case _ => false + } + } protected def getAfterCalls(exit: Return): Set[Command] = InterProcIRCursor.succ(exit).filter(_.isInstanceOf[Command]).map(_.asInstanceOf[Command]) @@ -258,7 +264,7 @@ abstract class BackwardIDESolver[D, T, L <: Lattice[T]](program: Program) protected def callToReturn(call: Command): DirectCall = { IRWalk.prevCommandInBlock(call) match { - case Some(x : DirectCall) => x + case Some(x: DirectCall) => x case p => throw Exception(s"Not a return/aftercall node $call .... 
prev = $p") } } @@ -271,22 +277,25 @@ abstract class BackwardIDESolver[D, T, L <: Lattice[T]](program: Program) procCalled.returnBlock.getOrElse(throw Exception(s"No return node for procedure ${procCalled}")).jump.asInstanceOf[Return] } - protected def isCall(call: CFGPosition): Boolean = - call match - case c: Unreachable => false /* don't process non-returning calls */ - case c : Command => { + protected def isCall(call: CFGPosition): Boolean = { + call match { + case _: Unreachable => false /* don't process non-returning calls */ + case c: Command => val call = IRWalk.prevCommandInBlock(c) call match { case Some(d: DirectCall) if d.target.returnBlock.isDefined => true case _ => false } - } case _ => false + } + } - protected def isExit(exit: CFGPosition): Boolean = - exit match + protected def isExit(exit: CFGPosition): Boolean = { + exit match { case procedure: Procedure => procedure.blocks.nonEmpty case _ => false + } + } protected def getAfterCalls(exit: Procedure): Set[DirectCall] = exit.incomingCalls().toSet } diff --git a/src/main/scala/bap/BAPExpr.scala b/src/main/scala/bap/BAPExpr.scala index 7408328dd..0f2591e3d 100644 --- a/src/main/scala/bap/BAPExpr.scala +++ b/src/main/scala/bap/BAPExpr.scala @@ -5,8 +5,6 @@ import ir._ /** Expression */ trait BAPExpr { - def toIR: Expr - /* * The size of output of the given expression. * @@ -18,8 +16,6 @@ trait BAPExpr { /** Concatenation of two bitvectors */ case class BAPConcat(left: BAPExpr, right: BAPExpr) extends BAPExpr { - def toIR: BinaryExpr = BinaryExpr(BVCONCAT, left.toIR, right.toIR) - override val size: Int = left.size + right.size } @@ -28,14 +24,6 @@ case class BAPConcat(left: BAPExpr, right: BAPExpr) extends BAPExpr { case class BAPSignedExtend(width: Int, body: BAPExpr) extends BAPExpr { override val size: Int = width - - override def toIR: Expr = { - if (width > body.size) { - SignExtend(width - body.size, body.toIR) - } else { - BAPExtract(width - 1, 0, body).toIR - } - } } /** Unsigned extend - pad in BIL @@ -43,15 +31,6 @@ case class BAPSignedExtend(width: Int, body: BAPExpr) extends BAPExpr { case class BAPUnsignedExtend(width: Int, body: BAPExpr) extends BAPExpr { override val size: Int = width - - override def toIR: Expr = { - if (width > body.size) { - ZeroExtend(width - body.size, body.toIR) - } else { - BAPExtract(width - 1, 0, body).toIR - } - - } } /** Extracts the bits from firstInt to secondInt (inclusive) from variable. @@ -61,19 +40,6 @@ case class BAPExtract(high: Int, low: Int, body: BAPExpr) extends BAPExpr { // + 1 as extracts are inclusive (e.g. 
[31:0] has 32 bits) override val size: Int = high - low + 1 - - override def toIR: Expr = { - val bodySize = body.size - if (size > bodySize) { - if (low == 0) { - ZeroExtend(size - bodySize, body.toIR) - } else { - Extract(high + 1, low, ZeroExtend(size - bodySize, body.toIR)) - } - } else { - Extract(high + 1, low, body.toIR) - } - } } case object BAPHighCast { @@ -90,19 +56,12 @@ case class BAPLiteral(value: BigInt, size: Int) extends BAPExpr { /** Value of literal */ override def toString: String = s"${value}bv$size" - - override def toIR: BitVecLiteral = BitVecLiteral(value, size) } /** Unary operator */ case class BAPUnOp(operator: BAPUnOperator, exp: BAPExpr) extends BAPExpr { override val size: Int = exp.size - - override def toIR: UnaryExpr = operator match { - case NOT => UnaryExpr(BVNOT, exp.toIR) - case NEG => UnaryExpr(BVNEG, exp.toIR) - } } sealed trait BAPUnOperator(op: String) { @@ -126,46 +85,6 @@ case class BAPBinOp(operator: BAPBinOperator, lhs: BAPExpr, rhs: BAPExpr) extend case EQ | NEQ | LT | LE | SLT | SLE => 1 case _ => lhs.size } - - override def toIR: Expr = operator match { - case PLUS => BinaryExpr(BVADD, lhs.toIR, rhs.toIR) - case MINUS => BinaryExpr(BVSUB, lhs.toIR, rhs.toIR) - case TIMES => BinaryExpr(BVMUL, lhs.toIR, rhs.toIR) - case DIVIDE => BinaryExpr(BVUDIV, lhs.toIR, rhs.toIR) - case SDIVIDE => BinaryExpr(BVSDIV, lhs.toIR, rhs.toIR) - // counterintuitive but correct according to BAP source - case MOD => BinaryExpr(BVSREM, lhs.toIR, rhs.toIR) - // counterintuitive but correct according to BAP source - case SMOD => BinaryExpr(BVUREM, lhs.toIR, rhs.toIR) - case LSHIFT => // BAP says caring about this case is necessary? - if (lhs.size == rhs.size) { - BinaryExpr(BVSHL, lhs.toIR, rhs.toIR) - } else { - BinaryExpr(BVSHL, lhs.toIR, ZeroExtend(lhs.size - rhs.size, rhs.toIR)) - } - case RSHIFT => - if (lhs.size == rhs.size) { - BinaryExpr(BVLSHR, lhs.toIR, rhs.toIR) - } else { - BinaryExpr(BVLSHR, lhs.toIR, ZeroExtend(lhs.size - rhs.size, rhs.toIR)) - } - case ARSHIFT => - if (lhs.size == rhs.size) { - BinaryExpr(BVASHR, lhs.toIR, rhs.toIR) - } else { - BinaryExpr(BVASHR, lhs.toIR, ZeroExtend(lhs.size - rhs.size, rhs.toIR)) - } - case AND => BinaryExpr(BVAND, lhs.toIR, rhs.toIR) - case OR => BinaryExpr(BVOR, lhs.toIR, rhs.toIR) - case XOR => BinaryExpr(BVXOR, lhs.toIR, rhs.toIR) - case EQ => BinaryExpr(BVCOMP, lhs.toIR, rhs.toIR) - case NEQ => UnaryExpr(BVNOT, BinaryExpr(BVCOMP, lhs.toIR, rhs.toIR)) - case LT => BinaryExpr(BVULT, lhs.toIR, rhs.toIR) - case LE => BinaryExpr(BVULE, lhs.toIR, rhs.toIR) - case SLT => BinaryExpr(BVSLT, lhs.toIR, rhs.toIR) - case SLE => BinaryExpr(BVSLE, lhs.toIR, rhs.toIR) - } - } sealed trait BAPBinOperator(op: String) { @@ -216,39 +135,23 @@ case object LE extends BAPBinOperator("LE") case object SLT extends BAPBinOperator("SLT") case object SLE extends BAPBinOperator("SLE") -trait BAPVariable extends BAPExpr - -trait BAPVar extends BAPVariable { +trait BAPVar extends BAPExpr { val name: String override val size: Int override def toString: String = name - override def toIR: Variable } +case class BAPRegister(override val name: String, override val size: Int) extends BAPVar -case class BAPRegister(override val name: String, override val size: Int) extends BAPVar { - override def toIR: Register = Register(s"$name", size) -} - -case class BAPLocalVar(override val name: String, override val size: Int) extends BAPVar { - override def toIR: LocalVar = LocalVar(s"$name", BitVecType(size)) -} +case class BAPLocalVar(override val name: 
String, override val size: Int) extends BAPVar /** A load from memory at location exp */ -case class BAPMemAccess(memory: BAPMemory, index: BAPExpr, endian: Endian, override val size: Int) extends BAPVariable { +case class BAPMemAccess(memory: BAPMemory, index: BAPExpr, endian: Endian, override val size: Int) extends BAPExpr { override def toString: String = s"${memory.name}[$index]" - override def toIR: MemoryLoad = { - MemoryLoad(memory.toIRMemory, index.toIR, endian, size) - } } -case class BAPMemory(name: String, addressSize: Int, valueSize: Int) extends BAPVariable { - override val size: Int = valueSize // should reconsider - override def toIR: Expr = ??? // should not encounter - def toIRMemory: Memory = SharedMemory(name, addressSize, valueSize) -} +case class BAPMemory(name: String, addressSize: Int, valueSize: Int) case class BAPStore(memory: BAPMemory, index: BAPExpr, value: BAPExpr, endian: Endian, size: Int) extends BAPExpr { - override def toIR: Expr = ??? // should not encounter override def toString: String = s"${memory.name}[$index] := $value" } diff --git a/src/main/scala/bap/BAPProgram.scala b/src/main/scala/bap/BAPProgram.scala index c586f5980..ed1f65400 100644 --- a/src/main/scala/bap/BAPProgram.scala +++ b/src/main/scala/bap/BAPProgram.scala @@ -45,15 +45,6 @@ case class BAPBlock(label: String, address: Option[BigInt], statements: List[BAP } -case class BAPParameter(name: String, size: Int, value: BAPVar) { - def toIR: Parameter = { - val register = value.toIR - register match { - case r: Register => Parameter(name, size, r) - case _ => throw Exception(s"subroutine parameter $this refers to non-register variable $value") - } - - } -} +case class BAPParameter(name: String, size: Int, value: BAPVar) case class BAPMemorySection(name: String, address: BigInt, size: Int, bytes: Seq[BAPLiteral]) diff --git a/src/main/scala/bap/BAPStatement.scala b/src/main/scala/bap/BAPStatement.scala index 09b77f60d..2f9afe34d 100644 --- a/src/main/scala/bap/BAPStatement.scala +++ b/src/main/scala/bap/BAPStatement.scala @@ -23,14 +23,12 @@ case class BAPGoTo(target: String, condition: BAPExpr, override val line: String sealed trait BAPStatement -sealed trait BAPAssign(lhs: BAPVariable, rhs: BAPExpr, line: String, instruction: String) extends BAPStatement { - override def toString: String = String.format("%s := %s;", lhs, rhs) -} - /** Memory store */ -case class BAPMemAssign(lhs: BAPMemory, rhs: BAPStore, line: String, instruction: String, address: Option[BigInt] = None) - extends BAPAssign(lhs, rhs, line, instruction) +case class BAPMemAssign(lhs: BAPMemory, rhs: BAPStore, line: String, instruction: String, address: Option[BigInt] = None) extends BAPStatement { + override def toString: String = String.format("%s := %s;", lhs, rhs) +} -case class BAPLocalAssign(lhs: BAPVar, rhs: BAPExpr, line: String, instruction: String, address: Option[BigInt] = None) - extends BAPAssign(lhs, rhs, line, instruction) +case class BAPLocalAssign(lhs: BAPVar, rhs: BAPExpr, line: String, instruction: String, address: Option[BigInt] = None) extends BAPStatement { + override def toString: String = String.format("%s := %s;", lhs, rhs) +} diff --git a/src/main/scala/ir/Expr.scala b/src/main/scala/ir/Expr.scala index 03579c2cf..d250ca9fc 100644 --- a/src/main/scala/ir/Expr.scala +++ b/src/main/scala/ir/Expr.scala @@ -5,7 +5,6 @@ import scala.collection.mutable sealed trait Expr { def toBoogie: BExpr - def loads: Set[MemoryLoad] = Set() def getType: IRType def gammas: Set[Variable] = Set() // variables not including 
those inside a load's index def variables: Set[Variable] = Set() @@ -54,7 +53,6 @@ case class Extract(end: Int, start: Int, body: Expr) extends Expr { override def getType: BitVecType = BitVecType(end - start) override def toString: String = s"$body[$end:$start]" override def acceptVisit(visitor: Visitor): Expr = visitor.visitExtract(this) - override def loads: Set[MemoryLoad] = body.loads } case class Repeat(repeats: Int, body: Expr) extends Expr { @@ -68,7 +66,6 @@ case class Repeat(repeats: Int, body: Expr) extends Expr { } override def toString: String = s"Repeat($repeats, $body)" override def acceptVisit(visitor: Visitor): Expr = visitor.visitRepeat(this) - override def loads: Set[MemoryLoad] = body.loads } case class ZeroExtend(extension: Int, body: Expr) extends Expr { @@ -82,7 +79,6 @@ case class ZeroExtend(extension: Int, body: Expr) extends Expr { } override def toString: String = s"ZeroExtend($extension, $body)" override def acceptVisit(visitor: Visitor): Expr = visitor.visitZeroExtend(this) - override def loads: Set[MemoryLoad] = body.loads } case class SignExtend(extension: Int, body: Expr) extends Expr { @@ -96,14 +92,12 @@ case class SignExtend(extension: Int, body: Expr) extends Expr { } override def toString: String = s"SignExtend($extension, $body)" override def acceptVisit(visitor: Visitor): Expr = visitor.visitSignExtend(this) - override def loads: Set[MemoryLoad] = body.loads } case class UnaryExpr(op: UnOp, arg: Expr) extends Expr { override def toBoogie: BExpr = UnaryBExpr(op, arg.toBoogie) override def gammas: Set[Variable] = arg.gammas override def variables: Set[Variable] = arg.variables - override def loads: Set[MemoryLoad] = arg.loads override def getType: IRType = (op, arg.getType) match { case (_: BoolUnOp, BoolType) => BoolType case (_: BVUnOp, bv: BitVecType) => bv @@ -152,7 +146,6 @@ case class BinaryExpr(op: BinOp, arg1: Expr, arg2: Expr) extends Expr { override def toBoogie: BExpr = BinaryBExpr(op, arg1.toBoogie, arg2.toBoogie) override def gammas: Set[Variable] = arg1.gammas ++ arg2.gammas override def variables: Set[Variable] = arg1.variables ++ arg2.variables - override def loads: Set[MemoryLoad] = arg1.loads ++ arg2.loads override def getType: IRType = (op, arg1.getType, arg2.getType) match { case (_: BoolBinOp, BoolType, BoolType) => BoolType case (binOp: BVBinOp, bv1: BitVecType, bv2: BitVecType) => @@ -292,22 +285,6 @@ enum Endian { case BigEndian } -case class MemoryLoad(mem: Memory, index: Expr, endian: Endian, size: Int) extends Expr { - override def toBoogie: BMemoryLoad = BMemoryLoad(mem.toBoogie, index.toBoogie, endian, size) - def toGamma(LArgs: List[BMapVar]): BExpr = mem match { - case m: StackMemory => - GammaLoad(m.toGamma, index.toBoogie, size, size / m.valueSize) - case m: SharedMemory => - BinaryBExpr(BoolOR, GammaLoad(m.toGamma, index.toBoogie, size, size / m.valueSize), L(LArgs, index.toBoogie)) - } - override def variables: Set[Variable] = index.variables - override def gammas: Set[Variable] = Set() - override def loads: Set[MemoryLoad] = Set(this) - override def getType: IRType = BitVecType(size) - override def toString: String = s"MemoryLoad($mem, $index, $endian, $size)" - override def acceptVisit(visitor: Visitor): Expr = visitor.visitMemoryLoad(this) -} - case class UninterpretedFunction(name: String, params: Seq[Expr], returnType: IRType) extends Expr { override def getType: IRType = returnType override def toBoogie: BFunctionCall = BFunctionCall(name, params.map(_.toBoogie).toList, returnType.toBoogie, true) diff --git 
a/src/main/scala/ir/Interpreter.scala b/src/main/scala/ir/Interpreter.scala index 53ef40c2d..204a3fda7 100644 --- a/src/main/scala/ir/Interpreter.scala +++ b/src/main/scala/ir/Interpreter.scala @@ -85,11 +85,6 @@ class Interpreter() { case BVNOT => smt_bvnot(arg) } - case ml: MemoryLoad => - Logger.debug(s"\t$ml") - val index: Int = eval(ml.index, env).value.toInt - getMemory(index, ml.size, ml.endian, mems) - case u: UninterpretedFunction => Logger.debug(s"\t$u") ??? @@ -259,26 +254,35 @@ class Interpreter() { private def interpretStatement(s: Statement): Unit = { Logger.debug(s"statement[$s]:") s match { - case assign: Assign => + case assign: LocalAssign => Logger.debug(s"LocalAssign ${assign.lhs} = ${assign.rhs}") val evalRight = eval(assign.rhs, regs) Logger.debug(s"LocalAssign ${assign.lhs} := 0x${evalRight.value.toString(16)}[u${evalRight.size}]\n") regs += (assign.lhs -> evalRight) - case assign: MemoryAssign => - Logger.debug(s"MemoryAssign ${assign.mem}[${assign.index}] = ${assign.value}") + case store: MemoryStore => + Logger.debug(s"MemoryStore ${store.mem}[${store.index}] = ${store.value}") - val index: Int = eval(assign.index, regs).value.toInt - val value: BitVecLiteral = eval(assign.value, regs) - Logger.debug(s"\tMemoryStore(mem:${assign.mem}, index:0x${index.toHexString}, value:0x${ - value.value - .toString(16) - }[u${value.size}], size:${assign.size})") + val index: Int = eval(store.index, regs).value.toInt + val value: BitVecLiteral = eval(store.value, regs) + Logger.debug(s"\tMemoryStore(mem:${store.mem}, index:0x${index.toHexString}, value:0x${ + value.value.toString(16) + }[u${value.size}], size:${store.size})") - val evalStore = setMemory(index, assign.size, assign.endian, value, mems) + val evalStore = setMemory(index, store.size, store.endian, value, mems) evalStore match { case BitVecLiteral(value, size) => - Logger.debug(s"MemoryAssign ${assign.mem} := 0x${value.toString(16)}[u$size]\n") + Logger.debug(s"MemoryStore ${store.mem} := 0x${value.toString(16)}[u$size]\n") + } + case load: MemoryLoad => + Logger.debug(s"MemoryLoad ${load.lhs} = ${load.mem}[${load.index}]") + val index: Int = eval(load.index, regs).value.toInt + Logger.debug(s"MemoryLoad ${load.lhs} := ${load.mem}[0x${index.toHexString}[u${load.size}]\n") + val evalLoad = getMemory(index, load.size, load.endian, mems) + regs += (load.lhs -> evalLoad) + evalLoad match { + case BitVecLiteral(value, size) => + Logger.debug(s"MemoryStore ${load.lhs} := 0x${value.toString(16)}[u$size]\n") } case _ : NOP => () case assert: Assert => diff --git a/src/main/scala/ir/Statement.scala b/src/main/scala/ir/Statement.scala index ce49bc82e..c1862a9ff 100644 --- a/src/main/scala/ir/Statement.scala +++ b/src/main/scala/ir/Statement.scala @@ -23,33 +23,47 @@ sealed trait Statement extends Command, IntrusiveListElement[Statement] { def acceptVisit(visitor: Visitor): Statement = throw new Exception( "visitor " + visitor + " unimplemented for: " + this ) - def successor: Command = parent.statements.nextOption(this).getOrElse(parent.jump) +} +sealed trait Assign extends Statement { + var lhs: Variable } -// invariant: rhs contains at most one MemoryLoad -class Assign(var lhs: Variable, var rhs: Expr, override val label: Option[String] = None) extends Statement { +class LocalAssign(var lhs: Variable, var rhs: Expr, override val label: Option[String] = None) extends Assign { override def modifies: Set[Global] = lhs match { case r: Register => Set(r) case _ => Set() } override def toString: String = s"$labelStr$lhs := $rhs" - 
override def acceptVisit(visitor: Visitor): Statement = visitor.visitAssign(this) + override def acceptVisit(visitor: Visitor): Statement = visitor.visitLocalAssign(this) } -object Assign: - def unapply(l: Assign): Option[(Variable, Expr, Option[String])] = Some(l.lhs, l.rhs, l.label) +object LocalAssign: + def unapply(l: LocalAssign): Option[(Variable, Expr, Option[String])] = Some(l.lhs, l.rhs, l.label) -// invariant: index and value do not contain MemoryLoads -class MemoryAssign(var mem: Memory, var index: Expr, var value: Expr, var endian: Endian, var size: Int, override val label: Option[String] = None) extends Statement { +class MemoryStore(var mem: Memory, var index: Expr, var value: Expr, var endian: Endian, var size: Int, override val label: Option[String] = None) extends Statement { override def modifies: Set[Global] = Set(mem) override def toString: String = s"$labelStr$mem[$index] := MemoryStore($value, $endian, $size)" - override def acceptVisit(visitor: Visitor): Statement = visitor.visitMemoryAssign(this) + override def acceptVisit(visitor: Visitor): Statement = visitor.visitMemoryStore(this) } -object MemoryAssign: - def unapply(m: MemoryAssign): Option[(Memory, Expr, Expr, Endian, Int, Option[String])] = Some(m.mem, m.index, m.value, m.endian, m.size, m.label) +object MemoryStore { + def unapply(m: MemoryStore): Option[(Memory, Expr, Expr, Endian, Int, Option[String])] = Some(m.mem, m.index, m.value, m.endian, m.size, m.label) +} + +class MemoryLoad(var lhs: Variable, var mem: Memory, var index: Expr, var endian: Endian, var size: Int, override val label: Option[String] = None) extends Assign { + override def modifies: Set[Global] = lhs match { + case r: Register => Set(r) + case _ => Set() + } + override def toString: String = s"$labelStr$lhs := MemoryLoad($mem, $index, $endian, $size)" + override def acceptVisit(visitor: Visitor): Statement = visitor.visitMemoryLoad(this) +} + +object MemoryLoad { + def unapply(m: MemoryLoad): Option[(Variable, Memory, Expr, Endian, Int, Option[String])] = Some(m.lhs, m.mem, m.index, m.endian, m.size, m.label) +} class NOP(override val label: Option[String] = None) extends Statement { override def toString: String = s"NOP $labelStr" @@ -87,10 +101,17 @@ class Unreachable(override val label: Option[String] = None) extends Jump { override def acceptVisit(visitor: Visitor): Jump = this } +object Unreachable { + def unapply(u: Unreachable): Option[Option[String]] = Some(u.label) +} + class Return(override val label: Option[String] = None) extends Jump { override def acceptVisit(visitor: Visitor): Jump = this } +object Return { + def unapply(r: Return): Option[Option[String]] = Some(r.label) +} class GoTo private (private val _targets: mutable.LinkedHashSet[Block], override val label: Option[String]) extends Jump { diff --git a/src/main/scala/ir/Visitor.scala b/src/main/scala/ir/Visitor.scala index 0b88f2a4c..c649dc4f3 100644 --- a/src/main/scala/ir/Visitor.scala +++ b/src/main/scala/ir/Visitor.scala @@ -10,19 +10,27 @@ abstract class Visitor { def visitStatement(node: Statement): Statement = node.acceptVisit(this) - def visitAssign(node: Assign): Statement = { + def visitLocalAssign(node: LocalAssign): Statement = { node.lhs = visitVariable(node.lhs) node.rhs = visitExpr(node.rhs) node } - def visitMemoryAssign(node: MemoryAssign): Statement = { + def visitMemoryStore(node: MemoryStore): Statement = { node.mem = visitMemory(node.mem) node.index = visitExpr(node.index) node.value = visitExpr(node.value) node } + def visitMemoryLoad(node: 
MemoryLoad): Statement = { + node.lhs = visitVariable(node.lhs) + node.mem = visitMemory(node.mem) + node.index = visitExpr(node.index) + node + } + + def visitAssume(node: Assume): Statement = { node.body = visitExpr(node.body) node @@ -110,10 +118,6 @@ abstract class Visitor { node.copy(arg1 = visitExpr(node.arg1), arg2 = visitExpr(node.arg2)) } - def visitMemoryLoad(node: MemoryLoad): Expr = { - node.copy(mem = visitMemory(node.mem), index = visitExpr(node.index)) - } - def visitMemory(node: Memory): Memory = node.acceptVisit(this) def visitStackMemory(node: StackMemory): Memory = node @@ -166,25 +170,26 @@ abstract class ReadOnlyVisitor extends Visitor { node } - override def visitMemoryLoad(node: MemoryLoad): Expr = { - visitMemory(node.mem) - visitExpr(node.index) - node - } - - override def visitAssign(node: Assign): Statement = { + override def visitLocalAssign(node: LocalAssign): Statement = { visitVariable(node.lhs) visitExpr(node.rhs) node } - override def visitMemoryAssign(node: MemoryAssign): Statement = { + override def visitMemoryStore(node: MemoryStore): Statement = { visitMemory(node.mem) visitExpr(node.index) visitExpr(node.value) node } + override def visitMemoryLoad(node: MemoryLoad): Statement = { + visitVariable(node.lhs) + visitMemory(node.mem) + visitExpr(node.index) + node + } + override def visitAssume(node: Assume): Statement = { visitExpr(node.body) node @@ -307,17 +312,18 @@ class StackSubstituter extends IntraproceduralControlFlowVisitor { override def visitMemoryLoad(node: MemoryLoad): MemoryLoad = { // replace mem with stack in load if index contains stack references val loadStackRefs = node.index.variables.intersect(stackRefs) + if (loadStackRefs.nonEmpty) { - node.copy(mem = stackMemory) - } else { - node + node.mem = stackMemory + } + if (stackRefs.contains(node.lhs) && node.lhs != stackPointer) { + stackRefs.remove(node.lhs) } - } - override def visitAssign(node: Assign): Statement = { - node.lhs = visitVariable(node.lhs) - node.rhs = visitExpr(node.rhs) + node + } + override def visitLocalAssign(node: LocalAssign): Statement = { // update stack references val variableVisitor = VariablesWithoutStoresLoads() variableVisitor.visitExpr(node.rhs) @@ -331,7 +337,7 @@ class StackSubstituter extends IntraproceduralControlFlowVisitor { node } - override def visitMemoryAssign(node: MemoryAssign): Statement = { + override def visitMemoryStore(node: MemoryStore): Statement = { val indexStackRefs = node.index.variables.intersect(stackRefs) if (indexStackRefs.nonEmpty) { node.mem = stackMemory @@ -421,7 +427,7 @@ class ExternalRemover(external: Set[String]) extends Visitor { } } -/** Gives variables that are not contained within a MemoryStore or MemoryLoad +/** Gives variables that are not contained within a MemoryStore or the rhs of a MemoryLoad * */ class VariablesWithoutStoresLoads extends ReadOnlyVisitor { val variables: mutable.Set[Variable] = mutable.Set() @@ -437,6 +443,7 @@ class VariablesWithoutStoresLoads extends ReadOnlyVisitor { } override def visitMemoryLoad(node: MemoryLoad): MemoryLoad = { + visitVariable(node.lhs) node } diff --git a/src/main/scala/ir/cilvisitor/CILVisitor.scala b/src/main/scala/ir/cilvisitor/CILVisitor.scala index 5583b12da..372b500de 100644 --- a/src/main/scala/ir/cilvisitor/CILVisitor.scala +++ b/src/main/scala/ir/cilvisitor/CILVisitor.scala @@ -35,7 +35,7 @@ trait CILVisitor: def leave_scope(outparam: ArrayBuffer[Parameter]): Unit = () -def doVisitList[T](v: CILVisitor, a: VisitAction[List[T]], n: T, continue: (T) => T): List[T] = 
{ +def doVisitList[T](v: CILVisitor, a: VisitAction[List[T]], n: T, continue: T => T): List[T] = { a match { case SkipChildren() => List(n) case ChangeTo(z) => z @@ -44,7 +44,7 @@ def doVisitList[T](v: CILVisitor, a: VisitAction[List[T]], n: T, continue: (T) = } } -def doVisit[T](v: CILVisitor, a: VisitAction[T], n: T, continue: (T) => T): T = { +def doVisit[T](v: CILVisitor, a: VisitAction[T], n: T, continue: T => T): T = { a match { case SkipChildren() => n case DoChildren() => continue(n) @@ -56,31 +56,30 @@ def doVisit[T](v: CILVisitor, a: VisitAction[T], n: T, continue: (T) => T): T = class CILVisitorImpl(val v: CILVisitor) { def visit_parameters(p: ArrayBuffer[Parameter]): ArrayBuffer[Parameter] = { - doVisit(v, v.vparams(p), p, (n) => n) + doVisit(v, v.vparams(p), p, n => n) } def visit_var(n: Variable): Variable = { - doVisit(v, v.vvar(n), n, (n) => n) + doVisit(v, v.vvar(n), n, n => n) } def visit_mem(n: Memory): Memory = { - doVisit(v, v.vmem(n), n, (n) => n) + doVisit(v, v.vmem(n), n, n => n) } def visit_jump(j: Jump): Jump = { - doVisit(v, v.vjump(j), j, (j) => j) + doVisit(v, v.vjump(j), j, j => j) } def visit_fallthrough(j: Option[GoTo]): Option[GoTo] = { - doVisit(v, v.vfallthrough(j), j, (j) => j) + doVisit(v, v.vfallthrough(j), j, j => j) } def visit_expr(n: Expr): Expr = { def continue(n: Expr): Expr = n match { case n: Literal => n - case MemoryLoad(mem, index, endian, size) => MemoryLoad(visit_mem(mem), visit_expr(index), endian, size) case Extract(end, start, arg) => Extract(end, start, visit_expr(arg)) case Repeat(repeats, arg) => Repeat(repeats, visit_expr(arg)) case ZeroExtend(bits, arg) => ZeroExtend(bits, visit_expr(arg)) @@ -96,29 +95,29 @@ class CILVisitorImpl(val v: CILVisitor) { def visit_stmt(s: Statement): List[Statement] = { def continue(n: Statement) = n match { case d: DirectCall => d - case i: IndirectCall => { + case i: IndirectCall => i.target = visit_var(i.target) i - } - case m: MemoryAssign => { + case m: MemoryStore => m.mem = visit_mem(m.mem) m.index = visit_expr(m.index) m.value = visit_expr(m.value) m - } - case m: Assign => { + case m: MemoryLoad => + m.mem = visit_mem(m.mem) + m.index = visit_expr(m.index) + m.lhs = visit_var(m.lhs) + m + case m: LocalAssign => m.rhs = visit_expr(m.rhs) m.lhs = visit_var(m.lhs) m - } - case s: Assert => { + case s: Assert => s.body = visit_expr(s.body) s - } - case s: Assume => { + case s: Assume => s.body = visit_expr(s.body) s - } case n: NOP => n } doVisitList(v, v.vstmt(s), s, continue) @@ -126,7 +125,7 @@ class CILVisitorImpl(val v: CILVisitor) { def visit_block(b: Block): Block = { def continue(b: Block) = { - b.statements.foreach(s => { + b.statements.foreach { s => val r = visit_stmt(s) r match { case Nil => b.statements.remove(s) @@ -134,7 +133,7 @@ class CILVisitorImpl(val v: CILVisitor) { b.statements.replace(s, n) b.statements.insertAllAfter(Some(n), tl) } - }) + } b.replaceJump(visit_jump(b.jump)) b } diff --git a/src/main/scala/translating/BAPToIR.scala b/src/main/scala/translating/BAPToIR.scala index 908978046..9793022cb 100644 --- a/src/main/scala/translating/BAPToIR.scala +++ b/src/main/scala/translating/BAPToIR.scala @@ -16,6 +16,8 @@ class BAPToIR(var program: BAPProgram, mainAddress: BigInt) { private val nameToProcedure: mutable.Map[String, Procedure] = mutable.Map() private val labelToBlock: mutable.Map[String, Block] = mutable.Map() + private var loadCounter: Int = 0 + def translate: Program = { var mainProcedure: Option[Procedure] = None val procedures: ArrayBuffer[Procedure] = 
ArrayBuffer() @@ -30,10 +32,10 @@ class BAPToIR(var program: BAPProgram, mainAddress: BigInt) { labelToBlock.addOne(b.label, block) } for (p <- s.in) { - procedure.in.append(p.toIR) + procedure.in.append(translateParameter(p)) } for (p <- s.out) { - procedure.out.append(p.toIR) + procedure.out.append(translateParameter(p)) } if (s.address.get == mainAddress) { mainProcedure = Some(procedure) @@ -47,7 +49,10 @@ class BAPToIR(var program: BAPProgram, mainAddress: BigInt) { for (b <- s.blocks) { val block = labelToBlock(b.label) for (st <- b.statements) { - block.statements.append(translate(st)) + val statements = translateStatement(st) + for (s <- statements) { + block.statements.append(s) + } } val (call, jump, newBlocks) = translate(b.jumps, block) procedure.addBlocks(newBlocks) @@ -68,7 +73,7 @@ class BAPToIR(var program: BAPProgram, mainAddress: BigInt) { val bytes = if (m.name == ".bss" && m.bytes.isEmpty) { for (_ <- 0 until m.size) yield BitVecLiteral(0, 8) } else { - m.bytes.map(_.toIR) + m.bytes.map(translateLiteral) } val readOnly = m.name == ".rodata" || m.name == ".got" // crude heuristic memorySections.addOne(m.address, MemorySection(m.name, m.address, m.size, bytes, readOnly, None)) @@ -77,17 +82,146 @@ class BAPToIR(var program: BAPProgram, mainAddress: BigInt) { Program(procedures, mainProcedure.get, memorySections) } - private def translate(s: BAPStatement) = s match { + private def translateStatement(s: BAPStatement): Seq[Statement] = s match { case b: BAPMemAssign => - val mem = b.lhs.toIRMemory - if (mem != b.rhs.memory.toIRMemory) { + val mem = translateMemory(b.lhs) + if (mem != translateMemory(b.rhs.memory)) { throw Exception(s"$b has conflicting lhs ${b.lhs} and rhs ${b.rhs.memory}") } - MemoryAssign(mem, b.rhs.index.toIR, b.rhs.value.toIR, b.rhs.endian, b.rhs.size, Some(b.line)) + Seq(MemoryStore(mem, translateExprOnly(b.rhs.index), translateExprOnly(b.rhs.value), b.rhs.endian, b.rhs.size, Some(b.line))) case b: BAPLocalAssign => - Assign(b.lhs.toIR, b.rhs.toIR, Some(b.line)) + val lhs = translateVar(b.lhs) + val (rhs, load) = translateExpr(b.rhs) + if (load.isDefined) { + val loadWithLabel = MemoryLoad(load.get.lhs, load.get.mem, load.get.index, load.get.endian, load.get.size, Some(b.line + "$0")) + val assign = LocalAssign(lhs, rhs, Some(b.line + "$1")) + Seq(loadWithLabel, assign) + } else { + val assign = LocalAssign(lhs, rhs, Some(b.line)) + Seq(assign) + } + } + + private def translateExpr(e: BAPExpr): (Expr, Option[MemoryLoad]) = e match { + case b @ BAPConcat(left, right) => + val (arg0, load0) = translateExpr(left) + val (arg1, load1) = translateExpr(right) + (load0, load1) match { + case (Some(load), None) => (BinaryExpr(BVCONCAT, arg0, arg1), Some(load)) + case (None, Some(load)) => (BinaryExpr(BVCONCAT, arg0, arg1), Some(load)) + case (None, None) => (BinaryExpr(BVCONCAT, arg0, arg1), None) + case (Some(_), Some(_)) => throw Exception(s"$b contains multiple loads") + } + case BAPSignedExtend(width, body) => + if (width > body.size) { + val (irBody, load) = translateExpr(body) + val se = SignExtend(width - body.size, irBody) + (se, load) + } else { + translateExpr(BAPExtract(width - 1, 0, body)) + } + case BAPUnsignedExtend(width, body) => + if (width > body.size) { + val (irBody, load) = translateExpr(body) + val ze = ZeroExtend(width - body.size, irBody) + (ze, load) + } else { + translateExpr(BAPExtract(width - 1, 0, body)) + } + case b @ BAPExtract(high, low, body) => + val bodySize = body.size + val (irBody, load) = translateExpr(body) + val extract = 
if (b.size > bodySize) { + if (low == 0) { + ZeroExtend(b.size - bodySize, irBody) + } else { + Extract(high + 1, low, ZeroExtend(b.size - bodySize, irBody)) + } + } else { + Extract(high + 1, low, irBody) + } + (extract, load) + case literal: BAPLiteral => (translateLiteral(literal), None) + case BAPUnOp(operator, exp) => operator match { + case NOT => (UnaryExpr(BVNOT, translateExprOnly(exp)), None) + case NEG => (UnaryExpr(BVNEG, translateExprOnly(exp)), None) + } + case BAPBinOp(operator, lhs, rhs) => operator match { + case PLUS => (BinaryExpr(BVADD, translateExprOnly(lhs), translateExprOnly(rhs)), None) + case MINUS => (BinaryExpr(BVSUB, translateExprOnly(lhs), translateExprOnly(rhs)), None) + case TIMES => (BinaryExpr(BVMUL, translateExprOnly(lhs), translateExprOnly(rhs)), None) + case DIVIDE => (BinaryExpr(BVUDIV, translateExprOnly(lhs), translateExprOnly(rhs)), None) + case SDIVIDE => (BinaryExpr(BVSDIV, translateExprOnly(lhs), translateExprOnly(rhs)), None) + // counterintuitive but correct according to BAP source + case MOD => (BinaryExpr(BVSREM, translateExprOnly(lhs), translateExprOnly(rhs)), None) + // counterintuitive but correct according to BAP source + case SMOD => (BinaryExpr(BVUREM, translateExprOnly(lhs), translateExprOnly(rhs)), None) + case LSHIFT => // BAP says caring about this case is necessary? + if (lhs.size == rhs.size) { + (BinaryExpr(BVSHL, translateExprOnly(lhs), translateExprOnly(rhs)), None) + } else { + (BinaryExpr(BVSHL, translateExprOnly(lhs), ZeroExtend(lhs.size - rhs.size, translateExprOnly(rhs))), None) + } + case RSHIFT => + if (lhs.size == rhs.size) { + (BinaryExpr(BVLSHR, translateExprOnly(lhs), translateExprOnly(rhs)), None) + } else { + (BinaryExpr(BVLSHR, translateExprOnly(lhs), ZeroExtend(lhs.size - rhs.size, translateExprOnly(rhs))), None) + } + case ARSHIFT => + if (lhs.size == rhs.size) { + (BinaryExpr(BVASHR, translateExprOnly(lhs), translateExprOnly(rhs)), None) + } else { + (BinaryExpr(BVASHR, translateExprOnly(lhs), ZeroExtend(lhs.size - rhs.size, translateExprOnly(rhs))), None) + } + case AND => (BinaryExpr(BVAND, translateExprOnly(lhs), translateExprOnly(rhs)), None) + case OR => (BinaryExpr(BVOR, translateExprOnly(lhs), translateExprOnly(rhs)), None) + case XOR => (BinaryExpr(BVXOR, translateExprOnly(lhs), translateExprOnly(rhs)), None) + case EQ => (BinaryExpr(BVCOMP, translateExprOnly(lhs), translateExprOnly(rhs)), None) + case NEQ => (UnaryExpr(BVNOT, BinaryExpr(BVCOMP, translateExprOnly(lhs), translateExprOnly(rhs))), None) + case LT => (BinaryExpr(BVULT, translateExprOnly(lhs), translateExprOnly(rhs)), None) + case LE => (BinaryExpr(BVULE, translateExprOnly(lhs), translateExprOnly(rhs)), None) + case SLT => (BinaryExpr(BVSLT, translateExprOnly(lhs), translateExprOnly(rhs)), None) + case SLE => (BinaryExpr(BVSLE, translateExprOnly(lhs), translateExprOnly(rhs)), None) + } + case b: BAPVar => (translateVar(b), None) + case BAPMemAccess(memory, index, endian, size) => + val temp = LocalVar("$load$" + loadCounter, BitVecType(size)) + loadCounter += 1 + val load = MemoryLoad(temp, translateMemory(memory), translateExprOnly(index), endian, size, None) + (temp, Some(load)) + } + + private def translateExprOnly(e: BAPExpr) = { + val (expr, load) = translateExpr(e) + if (load.isDefined) { + throw Exception(s"unexpected load in $e") + } + expr + } + + private def translateVar(variable: BAPVar): Variable = variable match { + case BAPRegister(name, size) => Register(name, size) + case BAPLocalVar(name, size) => LocalVar(name, 
BitVecType(size)) + } + + private def translateMemory(memory: BAPMemory): Memory = { + SharedMemory(memory.name, memory.addressSize, memory.valueSize) } + private def translateParameter(parameter: BAPParameter): Parameter = { + val register = translateExprOnly(parameter.value) + register match { + case r: Register => Parameter(parameter.name, parameter.size, r) + case _ => throw Exception(s"subroutine parameter $this refers to non-register variable ${parameter.value}") + } + } + + private def translateLiteral(literal: BAPLiteral) = { + BitVecLiteral(literal.value, literal.size) + } + + /** * Translates a list of jumps from BAP into a single Jump at the IR level by moving any conditions on jumps to * Assume statements in new blocks @@ -112,7 +246,9 @@ class BAPToIR(var program: BAPProgram, mainAddress: BigInt) { // condition is true and previous conditions existing means this condition // is actually that all previous conditions are false val conditionsIR = conditions.map(c => convertConditionBool(c, true)) - val condition = conditionsIR.tail.foldLeft(conditionsIR.head)((ands: Expr, next: Expr) => BinaryExpr(BoolAND, next, ands)) + val condition = conditionsIR.tail.foldLeft(conditionsIR.head) { + (ands: Expr, next: Expr) => BinaryExpr(BoolAND, next, ands) + } val newBlock = newBlockCondition(block, target, condition) newBlocks.append(newBlock) targets.append(newBlock) @@ -127,7 +263,9 @@ class BAPToIR(var program: BAPProgram, mainAddress: BigInt) { // if this is not the first condition, then we need to need to add // that all previous conditions are false val conditionsIR = conditions.map(c => convertConditionBool(c, true)) - conditionsIR.tail.foldLeft(currentCondition)((ands: Expr, next: Expr) => BinaryExpr(BoolAND, next, ands)) + conditionsIR.tail.foldLeft(currentCondition) { + (ands: Expr, next: Expr) => BinaryExpr(BoolAND, next, ands) + } } val newBlock = newBlockCondition(block, target, condition) newBlocks.append(newBlock) @@ -142,11 +280,11 @@ class BAPToIR(var program: BAPProgram, mainAddress: BigInt) { jumps.head match { case b: BAPDirectCall => val call = Some(DirectCall(nameToProcedure(b.target),Some(b.line))) - val ft = (b.returnTarget.map(t => labelToBlock(t))).map(x => GoTo(Set(x))).getOrElse(Unreachable()) + val ft = b.returnTarget.map(t => labelToBlock(t)).map(x => GoTo(Set(x))).getOrElse(Unreachable()) (call, ft, ArrayBuffer()) case b: BAPIndirectCall => - val call = IndirectCall(b.target.toIR, Some(b.line)) - val ft = (b.returnTarget.map(t => labelToBlock(t))).map(x => GoTo(Set(x))).getOrElse(Unreachable()) + val call = IndirectCall(translateVar(b.target), Some(b.line)) + val ft = b.returnTarget.map(t => labelToBlock(t)).map(x => GoTo(Set(x))).getOrElse(Unreachable()) (Some(call), ft, ArrayBuffer()) case b: BAPGoTo => val target = labelToBlock(b.target) @@ -173,7 +311,7 @@ class BAPToIR(var program: BAPProgram, mainAddress: BigInt) { * if necessary. 
* */ private def convertConditionBool(expr: BAPExpr, negative: Boolean): Expr = { - val e = expr.toIR + val e = translateExprOnly(expr) e.getType match { case BitVecType(s) => if (negative) { diff --git a/src/main/scala/translating/SemanticsLoader.scala b/src/main/scala/translating/GTIRBLoader.scala similarity index 68% rename from src/main/scala/translating/SemanticsLoader.scala rename to src/main/scala/translating/GTIRBLoader.scala index e858a9e85..836ed5de0 100644 --- a/src/main/scala/translating/SemanticsLoader.scala +++ b/src/main/scala/translating/GTIRBLoader.scala @@ -1,5 +1,5 @@ package translating -import Parsers.SemanticsParser.* +import Parsers.ASLpParser.* import com.google.protobuf.ByteString import Parsers.* @@ -13,14 +13,15 @@ import scala.collection.mutable.ArrayBuffer import com.grammatech.gtirb.proto.Module.ByteOrder.LittleEndian import util.Logger -class SemanticsLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]]]) { +class GTIRBLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]]]) { private val constMap = mutable.Map[String, IRType]() private val varMap = mutable.Map[String, IRType]() private var instructionCount = 0 private var blockCount = 0 + private var loadCounter = 0 - val opcodeSize = 4 + private val opcodeSize = 4 def visitBlock(blockUUID: ByteString, blockCountIn: Int, blockAddress: Option[BigInt]): ArrayBuffer[Statement] = { blockCount = blockCountIn @@ -39,34 +40,31 @@ class SemanticsLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]] val instructionAddress = a + (opcodeSize * instructionCount) instructionAddress.toString + "$" + i } - - val statement = visitStmt(s, label) - if (statement.isDefined) { - statements.append(statement.get) - } + + statements.appendAll(visitStmt(s, label)) } instructionCount += 1 } statements } - private def visitStmt(ctx: StmtContext, label: Option[String] = None): Option[Statement] = { + private def visitStmt(ctx: StmtContext, label: Option[String] = None): Seq[Statement] = { ctx match { case a: AssignContext => visitAssign(a, label) case c: ConstDeclContext => visitConstDecl(c, label) case v: VarDeclContext => visitVarDecl(v, label) case v: VarDeclsNoInitContext => visitVarDeclsNoInit(v) - None - case a: AssertContext => visitAssert(a, label) - case t: TCallContext => visitTCall(t, label) - case i: IfContext => visitIf(i, label) - case t: ThrowContext => Some(visitThrow(t, label)) + Seq() + case a: AssertContext => visitAssert(a, label).toSeq + case t: TCallContext => visitTCall(t, label).toSeq + case i: IfContext => visitIf(i, label).toSeq + case t: ThrowContext => Seq(visitThrow(t, label)) } } - private def visitAssert(ctx: AssertContext, label: Option[String] = None): Option[Assert] = { - val expr = visitExpr(ctx.expr) + private def visitAssert(ctx: AssertContext, label: Option[String] = None): Option[Statement] = { + val expr = visitExprOnly(ctx.expr) if (expr.isDefined) { Some(Assert(expr.get, None, label)) } else { @@ -90,8 +88,8 @@ class SemanticsLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]] checkArgs(function, 1, 4, typeArgs.size, args.size, ctx.getText) val mem = SharedMemory("mem", 64, 8) // yanked from BAP val size = parseInt(typeArgs.head) * 8 - val index = visitExpr(args.head) - val value = visitExpr(args(3)) + val index = visitExprOnly(args.head) + val value = visitExprOnly(args(3)) val otherSize = parseInt(args(1)) * 8 val accessType = parseInt(args(2)) // AccType enum in ASLi, not very relevant to us if (size != otherSize) { @@ -100,12 +98,12 @@ 
class SemanticsLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]] // LittleEndian is an assumption if (index.isDefined && value.isDefined) { - Some(MemoryAssign(mem, index.get, value.get, Endian.LittleEndian, size.toInt, label)) + Some(MemoryStore(mem, index.get, value.get, Endian.LittleEndian, size.toInt, label)) } else { None } case "unsupported_opcode.0" => { - val op = args.headOption.flatMap(visitExpr) match { + val op = args.headOption.flatMap(visitExprOnly) match { case Some(IntLiteral(s)) => Some("%08x".format(s)) case c => c.map(_.toString) } @@ -130,7 +128,7 @@ class SemanticsLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]] } private def visitIf(ctx: IfContext, label: Option[String] = None): Option[TempIf] = { - val condition = visitExpr(ctx.cond) + val condition = visitExprOnly(ctx.cond) val thenStmts = ctx.thenStmts.stmt.asScala.flatMap(visitStmt(_, label)) val elseStmts = Option(ctx.elseStmts) match { @@ -151,35 +149,64 @@ class SemanticsLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]] varMap ++= newVars } - private def visitVarDecl(ctx: VarDeclContext, label: Option[String] = None): Option[Assign] = { + private def visitVarDecl(ctx: VarDeclContext, label: Option[String] = None): Seq[Statement] = { val ty = visitType(ctx.`type`()) val name = visitIdent(ctx.lvar) varMap += (name -> ty) - val expr = visitExpr(ctx.expr()) - expr.map(Assign(LocalVar(name, ty), _, label)) + val (expr, load) = visitExpr(ctx.expr) + if (expr.isDefined) { + if (load.isDefined) { + val loadWithLabel = MemoryLoad(load.get.lhs, load.get.mem, load.get.index, load.get.endian, load.get.size, label.map(_ + "$0")) + val assign = LocalAssign(LocalVar(name, ty), expr.get, label.map(_ + "$1")) + Seq(loadWithLabel, assign) + } else { + val assign = LocalAssign(LocalVar(name, ty), expr.get, label) + Seq(assign) + } + } else { + Seq() + } } - private def visitAssign(ctx: AssignContext, label: Option[String] = None): Option[Assign] = { + private def visitAssign(ctx: AssignContext, label: Option[String] = None): Seq[Statement] = { val lhs = visitLexpr(ctx.lexpr) - val rhs = visitExpr(ctx.expr) - lhs.zip(rhs).map((lhs, rhs) => Assign(lhs, rhs, label)) + val (rhs, load) = visitExpr(ctx.expr) + if (lhs.isDefined && rhs.isDefined) { + if (load.isDefined) { + val loadWithLabel = MemoryLoad(load.get.lhs, load.get.mem, load.get.index, load.get.endian, load.get.size, label.map(_ + "$0")) + val assign = LocalAssign(lhs.get, rhs.get, label.map(_ + "$1")) + Seq(loadWithLabel, assign) + } else { + val assign = LocalAssign(lhs.get, rhs.get, label) + Seq(assign) + } + } else { + Seq() + } } - private def visitConstDecl(ctx: ConstDeclContext, label: Option[String] = None): Option[Assign] = { + private def visitConstDecl(ctx: ConstDeclContext, label: Option[String] = None): Seq[Statement] = { val ty = visitType(ctx.`type`()) val name = visitIdent(ctx.lvar) constMap += (name -> ty) - val expr = visitExpr(ctx.expr) + val (expr, load) = visitExpr(ctx.expr) if (expr.isDefined) { - Some(Assign(LocalVar(name + "$" + blockCount + "$" + instructionCount, ty), expr.get, label)) + if (load.isDefined) { + val loadWithLabel = MemoryLoad(load.get.lhs, load.get.mem, load.get.index, load.get.endian, load.get.size, label.map(_ + "$0")) + val assign = LocalAssign(LocalVar(name + "$" + blockCount + "$" + instructionCount, ty), expr.get, label.map(_ + "$1")) + Seq(loadWithLabel, assign) + } else { + val assign = LocalAssign(LocalVar(name + "$" + blockCount + "$" + instructionCount, ty), expr.get, label) 
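// Editorial sketch, not part of the patch: the statement pair this splitting produces
// for a constant declaration whose right-hand side reads memory, e.g.
//   constant bits(64) Cse0 = Mem.read.0(R1_val, 8)   at ASLp label "1f40$2".
// The temporary "$load0", the index variable and the counters in "Cse0$0$2" are
// hypothetical names; the constructors are the ones introduced in this patch and
// assume ir.* is in scope.
val index = LocalVar("R1_val", BitVecType(64))
val temp = LocalVar("$load0", BitVecType(64))
val loadStmt = MemoryLoad(temp, SharedMemory("mem", 64, 8), index, Endian.LittleEndian, 64, Some("1f40$2$0"))
val declStmt = LocalAssign(LocalVar("Cse0$0$2", BitVecType(64)), temp, Some("1f40$2$1"))
val emitted: Seq[Statement] = Seq(loadStmt, declStmt) // load first, then the assignment that uses it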
+ Seq(assign) + } } else { - None + Seq() } } private def visitType(ctx: TypeContext): IRType = { - ctx match + ctx match { case e: TypeBitsContext => BitVecType(parseInt(e.size).toInt) case r: TypeRegisterContext => // this is a special register - not the same as a register in the IR @@ -191,21 +218,31 @@ class SemanticsLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]] case _ => throw Exception(s"unknown type ${ctx.getText}") } case _ => throw Exception(s"unknown type ${ctx.getText}") + } } - private def visitExpr(ctx: ExprContext): Option[Expr] = { + private def visitExpr(ctx: ExprContext): (Option[Expr], Option[MemoryLoad]) = { ctx match { - case e: ExprVarContext => visitExprVar(e) + case e: ExprVarContext => (visitExprVar(e), None) case e: ExprTApplyContext => visitExprTApply(e) case e: ExprSlicesContext => visitExprSlices(e) - case e: ExprFieldContext => Some(visitExprField(e)) - case e: ExprArrayContext => Some(visitExprArray(e)) - case e: ExprLitIntContext => Some(IntLiteral(parseInt(e))) - case e: ExprLitBitsContext => Some(visitExprLitBits(e)) + case e: ExprFieldContext => (Some(visitExprField(e)), None) + case e: ExprArrayContext => (Some(visitExprArray(e)), None) + case e: ExprLitIntContext => (Some(IntLiteral(parseInt(e))), None) + case e: ExprLitBitsContext => (Some(visitExprLitBits(e)), None) } } - private def visitExprVar(ctx: ExprVarContext): Option[Expr] = { + private def visitExprOnly(ctx: ExprContext): Option[Expr] = { + val (expr, load) = visitExpr(ctx) + if (load.isDefined) { + throw Exception("") + } else { + expr + } + } + + private def visitExprVar(ctx: ExprVarContext): Option[Expr] = { val name = visitIdent(ctx.ident) name match { case n if constMap.contains(n) => Some(LocalVar(n + "$" + blockCount + "$" + instructionCount, constMap(n))) @@ -225,7 +262,7 @@ class SemanticsLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]] } } - private def visitExprTApply(ctx: ExprTApplyContext): Option[Expr] = { + private def visitExprTApply(ctx: ExprTApplyContext): (Option[Expr], Option[MemoryLoad]) = { val function = visitIdent(ctx.ident) val typeArgs: mutable.Buffer[ExprContext] = Option(ctx.tes) match { @@ -241,7 +278,7 @@ class SemanticsLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]] case "Mem.read.0" => checkArgs(function, 1, 3, typeArgs.size, args.size, ctx.getText) val mem = SharedMemory("mem", 64, 8) - val index = visitExpr(args.head) + val index = visitExprOnly(args.head) // can't have load inside load val size = parseInt(typeArgs.head) * 8 val otherSize = parseInt(args(1)) * 8 @@ -250,112 +287,112 @@ class SemanticsLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]] throw Exception(s"inconsistent size parameters in Mem.read.0: ${ctx.getText}") } + val temp = LocalVar("$load" + loadCounter, BitVecType(size.toInt)) + loadCounter += 1 + if (index.isDefined) { // LittleEndian is assumed - Some(MemoryLoad(mem, index.get, Endian.LittleEndian, size.toInt)) + (Some(temp), Some(MemoryLoad(temp, mem, index.get, Endian.LittleEndian, size.toInt, None))) } else { - None + (None, None) } case "cvt_bool_bv.0" => checkArgs(function, 0, 1, typeArgs.size, args.size, ctx.getText) - val expr = visitExpr(args.head) - if (expr.isDefined) { - val e = expr.get - e match { - case b: BinaryExpr if b.op == BVEQ => Some(BinaryExpr(BVCOMP, b.arg1, b.arg2)) - case FalseLiteral => Some(BitVecLiteral(0, 1)) - case TrueLiteral => Some(BitVecLiteral(1, 1)) - case _ => throw Exception(s"unhandled conversion from bool to bitvector: 
${ctx.getText}") - } - } else { - None + val expr = visitExprOnly(args.head) + val result = expr.map { + case b: BinaryExpr if b.op == BVEQ => BinaryExpr(BVCOMP, b.arg1, b.arg2) + case FalseLiteral => BitVecLiteral(0, 1) + case TrueLiteral => BitVecLiteral(1, 1) + case _ => throw Exception(s"unhandled conversion from bool to bitvector: ${ctx.getText}") } - - case "not_bool.0" => resolveUnaryOp(BoolNOT, function, 0, typeArgs, args, ctx.getText) - case "eq_enum.0" => resolveBinaryOp(BoolEQ, function, 0, typeArgs, args, ctx.getText) - case "or_bool.0" => resolveBinaryOp(BoolOR, function, 0, typeArgs, args, ctx.getText) - case "and_bool.0" => resolveBinaryOp(BoolAND, function, 0, typeArgs, args, ctx.getText) - - case "not_bits.0" => resolveUnaryOp(BVNOT, function, 1, typeArgs, args, ctx.getText) - case "or_bits.0" => resolveBinaryOp(BVOR, function, 1, typeArgs, args, ctx.getText) - case "and_bits.0" => resolveBinaryOp(BVAND, function, 1, typeArgs, args, ctx.getText) - case "eor_bits.0" => resolveBinaryOp(BVXOR, function, 1, typeArgs, args, ctx.getText) - case "eq_bits.0" => resolveBinaryOp(BVEQ, function, 1, typeArgs, args, ctx.getText) - case "add_bits.0" => resolveBinaryOp(BVADD, function, 1, typeArgs, args, ctx.getText) - case "sub_bits.0" => resolveBinaryOp(BVSUB, function, 1, typeArgs, args, ctx.getText) - case "mul_bits.0" => resolveBinaryOp(BVMUL, function, 1, typeArgs, args, ctx.getText) - case "sdiv_bits.0" => resolveBinaryOp(BVSDIV, function, 1, typeArgs, args, ctx.getText) - - case "slt_bits.0" => resolveBinaryOp(BVSLT, function, 1, typeArgs, args, ctx.getText) - case "sle_bits.0" => resolveBinaryOp(BVSLE, function, 1, typeArgs, args, ctx.getText) - - case "lsl_bits.0" => resolveBitShiftOp(BVSHL, function, typeArgs, args, ctx.getText) - case "lsr_bits.0" => resolveBitShiftOp(BVLSHR, function, typeArgs, args, ctx.getText) - case "asr_bits.0" => resolveBitShiftOp(BVASHR, function, typeArgs, args, ctx.getText) + (result, None) + + case "not_bool.0" => (resolveUnaryOp(BoolNOT, function, 0, typeArgs, args, ctx.getText), None) + case "eq_enum.0" => (resolveBinaryOp(BoolEQ, function, 0, typeArgs, args, ctx.getText), None) + case "or_bool.0" => (resolveBinaryOp(BoolOR, function, 0, typeArgs, args, ctx.getText), None) + case "and_bool.0" => (resolveBinaryOp(BoolAND, function, 0, typeArgs, args, ctx.getText), None) + + case "not_bits.0" => (resolveUnaryOp(BVNOT, function, 1, typeArgs, args, ctx.getText), None) + case "or_bits.0" => (resolveBinaryOp(BVOR, function, 1, typeArgs, args, ctx.getText), None) + case "and_bits.0" => (resolveBinaryOp(BVAND, function, 1, typeArgs, args, ctx.getText), None) + case "eor_bits.0" => (resolveBinaryOp(BVXOR, function, 1, typeArgs, args, ctx.getText), None) + case "eq_bits.0" => (resolveBinaryOp(BVEQ, function, 1, typeArgs, args, ctx.getText), None) + case "add_bits.0" => (resolveBinaryOp(BVADD, function, 1, typeArgs, args, ctx.getText), None) + case "sub_bits.0" => (resolveBinaryOp(BVSUB, function, 1, typeArgs, args, ctx.getText), None) + case "mul_bits.0" => (resolveBinaryOp(BVMUL, function, 1, typeArgs, args, ctx.getText), None) + case "sdiv_bits.0" => (resolveBinaryOp(BVSDIV, function, 1, typeArgs, args, ctx.getText), None) + + case "slt_bits.0" => (resolveBinaryOp(BVSLT, function, 1, typeArgs, args, ctx.getText), None) + case "sle_bits.0" => (resolveBinaryOp(BVSLE, function, 1, typeArgs, args, ctx.getText), None) + + case "lsl_bits.0" => (resolveBitShiftOp(BVSHL, function, typeArgs, args, ctx.getText), None) + case "lsr_bits.0" => 
(resolveBitShiftOp(BVLSHR, function, typeArgs, args, ctx.getText), None) + case "asr_bits.0" => (resolveBitShiftOp(BVASHR, function, typeArgs, args, ctx.getText), None) case "append_bits.0" => - resolveBinaryOp(BVCONCAT, function, 2, typeArgs, args, ctx.getText) + (resolveBinaryOp(BVCONCAT, function, 2, typeArgs, args, ctx.getText), None) case "replicate_bits.0" => checkArgs(function, 2, 2, typeArgs.size, args.size, ctx.getText) val oldSize = parseInt(typeArgs(0)) val replications = parseInt(typeArgs(1)).toInt - val arg0 = visitExpr(args(0)) + // memory loads shouldn't appear here? + val arg0 = visitExprOnly(args(0)) val arg1 = parseInt(args(1)) val newSize = oldSize * replications if (arg1 != replications) { Exception(s"inconsistent size parameters in replicate_bits.0: ${ctx.getText}") } if (arg0.isDefined) { - Some(Repeat(replications, arg0.get)) + (Some(Repeat(replications, arg0.get)), None) } else { - None + (None, None) } case "ZeroExtend.0" => checkArgs(function, 2, 2, typeArgs.size, args.size, ctx.getText) val oldSize = parseInt(typeArgs(0)) val newSize = parseInt(typeArgs(1)) - val arg0 = visitExpr(args(0)) + val (arg0, load) = visitExpr(args(0)) val arg1 = parseInt(args(1)) if (arg1 != newSize) { Exception(s"inconsistent size parameters in ZeroExtend.0: ${ctx.getText}") } if (arg0.isDefined) { - Some(ZeroExtend((newSize - oldSize).toInt, arg0.get)) + (Some(ZeroExtend((newSize - oldSize).toInt, arg0.get)), load) } else { - None + (None, None) } case "SignExtend.0" => checkArgs(function, 2, 2, typeArgs.size, args.size, ctx.getText) val oldSize = parseInt(typeArgs(0)) val newSize = parseInt(typeArgs(1)) - val arg0 = visitExpr(args(0)) + val (arg0, load) = visitExpr(args(0)) val arg1 = parseInt(args(1)) if (arg1 != newSize) { Exception(s"inconsistent size parameters in SignExtend.0: ${ctx.getText}") } if (arg0.isDefined) { - Some(SignExtend((newSize - oldSize).toInt, arg0.get)) + (Some(SignExtend((newSize - oldSize).toInt, arg0.get)), load) } else { - None + (None, None) } case "FPCompareGT.0" | "FPCompareGE.0" | "FPCompareEQ.0" => checkArgs(function, 1, 3, typeArgs.size, args.size, ctx.getText) val name = function.stripSuffix(".0") val size = parseInt(typeArgs(0)) - val argsIR = args.flatMap(visitExpr).toSeq - Some(UninterpretedFunction(name + "$" + size, argsIR, BoolType)) + val argsIR = args.flatMap(visitExprOnly).toSeq + (Some(UninterpretedFunction(name + "$" + size, argsIR, BoolType)), None) case "FPAdd.0" | "FPMul.0" | "FPDiv.0" | "FPMulX.0" | "FPMax.0" | "FPMin.0" | "FPMaxNum.0" | "FPMinNum.0" | "FPSub.0" => checkArgs(function, 1, 3, typeArgs.size, args.size, ctx.getText) val name = function.stripSuffix(".0") val size = parseInt(typeArgs(0)).toInt - val argsIR = args.flatMap(visitExpr).toSeq - Some(UninterpretedFunction(name + "$" + size, argsIR, BitVecType(size))) + val argsIR = args.flatMap(visitExprOnly).toSeq + (Some(UninterpretedFunction(name + "$" + size, argsIR, BitVecType(size))), None) case "FPMulAddH.0" | "FPMulAdd.0" | "FPRoundInt.0" | @@ -363,31 +400,31 @@ class SemanticsLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]] checkArgs(function, 1, 4, typeArgs.size, args.size, ctx.getText) val name = function.stripSuffix(".0") val size = parseInt(typeArgs(0)).toInt - val argsIR = args.flatMap(visitExpr).toSeq - Some(UninterpretedFunction(name + "$" + size, argsIR, BitVecType(size))) + val argsIR = args.flatMap(visitExprOnly).toSeq + (Some(UninterpretedFunction(name + "$" + size, argsIR, BitVecType(size))), None) case "FPRecpX.0" | "FPSqrt.0" | 
"FPRecipEstimate.0" | "FPRSqrtStepFused.0" | "FPRecipStepFused.0" => checkArgs(function, 1, 2, typeArgs.size, args.size, ctx.getText) val name = function.stripSuffix(".0") val size = parseInt(typeArgs(0)).toInt - val argsIR = args.flatMap(visitExpr).toSeq - Some(UninterpretedFunction(name + "$" + size, argsIR, BitVecType(size))) + val argsIR = args.flatMap(visitExprOnly).toSeq + (Some(UninterpretedFunction(name + "$" + size, argsIR, BitVecType(size))), None) case "FPCompare.0" => checkArgs(function, 1, 4, typeArgs.size, args.size, ctx.getText) val name = function.stripSuffix(".0") val size = parseInt(typeArgs(0)) - val argsIR = args.flatMap(visitExpr).toSeq - Some(UninterpretedFunction(name + "$" + size, argsIR, BitVecType(4))) + val argsIR = args.flatMap(visitExprOnly).toSeq + (Some(UninterpretedFunction(name + "$" + size, argsIR, BitVecType(4))), None) case "FPConvert.0" => checkArgs(function, 2, 3, typeArgs.size, args.size, ctx.getText) val name = function.stripSuffix(".0") val outSize = parseInt(typeArgs(0)).toInt val inSize = parseInt(typeArgs(1)) - val argsIR = args.flatMap(visitExpr).toSeq - Some(UninterpretedFunction(name + "$" + outSize + "$" + inSize, argsIR, BitVecType(outSize))) + val argsIR = args.flatMap(visitExprOnly).toSeq + (Some(UninterpretedFunction(name + "$" + outSize + "$" + inSize, argsIR, BitVecType(outSize))), None) case "FPToFixed.0" => checkArgs(function, 2, 5, typeArgs.size, args.size, ctx.getText) @@ -395,8 +432,8 @@ class SemanticsLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]] val outSize = parseInt(typeArgs(0)).toInt val inSize = parseInt(typeArgs(1)) // need to specifically handle the integer parameter - val argsIR = args.flatMap(visitExpr).toSeq - Some(UninterpretedFunction(name + "$" + outSize + "$" + inSize, argsIR, BitVecType(outSize))) + val argsIR = args.flatMap(visitExprOnly).toSeq + (Some(UninterpretedFunction(name + "$" + outSize + "$" + inSize, argsIR, BitVecType(outSize))), None) case "FixedToFP.0" => checkArgs(function, 2, 5, typeArgs.size, args.size, ctx.getText) @@ -404,28 +441,28 @@ class SemanticsLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]] val inSize = parseInt(typeArgs(0)) val outSize = parseInt(typeArgs(1)).toInt // need to specifically handle the integer parameter - val argsIR = args.flatMap(visitExpr).toSeq - Some(UninterpretedFunction(name + "$" + outSize + "$" + inSize, argsIR, BitVecType(outSize))) + val argsIR = args.flatMap(visitExprOnly).toSeq + (Some(UninterpretedFunction(name + "$" + outSize + "$" + inSize, argsIR, BitVecType(outSize))), None) case "FPConvertBF.0" => checkArgs(function, 0, 3, typeArgs.size, args.size, ctx.getText) val name = function.stripSuffix(".0") - val argsIR = args.flatMap(visitExpr).toSeq - Some(UninterpretedFunction(name, argsIR, BitVecType(32))) + val argsIR = args.flatMap(visitExprOnly).toSeq + (Some(UninterpretedFunction(name, argsIR, BitVecType(32))), None) case "FPToFixedJS_impl.0" => checkArgs(function, 2, 3, typeArgs.size, args.size, ctx.getText) val name = function.stripSuffix(".0") val inSize = parseInt(typeArgs(0)) val outSize = parseInt(typeArgs(1)).toInt - val argsIR = args.flatMap(visitExpr).toSeq - Some(UninterpretedFunction(name + "$" + outSize + "$" + inSize, argsIR, BitVecType(outSize))) + val argsIR = args.flatMap(visitExprOnly).toSeq + (Some(UninterpretedFunction(name + "$" + outSize + "$" + inSize, argsIR, BitVecType(outSize))), None) case "BFAdd.0" | "BFMul.0" => checkArgs(function, 0, 2, typeArgs.size, args.size, ctx.getText) val name = 
function.stripSuffix(".0") - val argsIR = args.flatMap(visitExpr).toSeq - Some(UninterpretedFunction(name, argsIR, BitVecType(32))) + val argsIR = args.flatMap(visitExprOnly).toSeq + (Some(UninterpretedFunction(name, argsIR, BitVecType(32))), None) case _ => // known ASLp methods not yet handled: @@ -434,7 +471,7 @@ class SemanticsLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]] // and will require some research into their semantics // AtomicStart, AtomicEnd - can't model as uninterpreted functions, requires modelling atomic section Logger.debug(s"unidentified call to $function: ${ctx.getText}") - None + (None, None) } } @@ -448,8 +485,9 @@ class SemanticsLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]] ): Option[BinaryExpr] = { checkArgs(function, typeArgsExpected, 2, typeArgs.size, args.size, token) // we don't currently check the size for BV ops which is the type arg - val arg0 = visitExpr(args(0)) - val arg1 = visitExpr(args(1)) + // memory loads shouldn't appear inside binary operations? + val arg0 = visitExprOnly(args(0)) + val arg1 = visitExprOnly(args(1)) if (arg0.isDefined && arg1.isDefined) { Some(BinaryExpr(operator, arg0.get, arg1.get)) } else { @@ -466,7 +504,8 @@ class SemanticsLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]] ): Option[UnaryExpr] = { checkArgs(function, typeArgsExpected, 1, typeArgs.size, args.size, token) // we don't currently check the size for BV ops which is the type arg - val arg = visitExpr(args.head) + // memory loads shouldn't appear inside unary operations? + val arg = visitExprOnly(args.head) if (arg.isDefined) { Some(UnaryExpr(operator, arg.get)) } else { @@ -483,8 +522,9 @@ class SemanticsLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]] checkArgs(function, 2, 2, typeArgs.size, args.size, token) val size0 = parseInt(typeArgs(0)) val size1 = parseInt(typeArgs(1)) - val arg0 = visitExpr(args(0)) - val arg1 = visitExpr(args(1)) + val arg0 = visitExprOnly(args(0)) + val arg1 = visitExprOnly(args(1)) + // memory loads shouldn't appear inside bitshifts? 
if (arg0.isDefined && arg1.isDefined) { if (size0 == size1) { Some(BinaryExpr(operator, arg0.get, arg1.get)) @@ -496,18 +536,18 @@ class SemanticsLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]] } } - private def visitExprSlices(ctx: ExprSlicesContext): Option[Extract] = { + private def visitExprSlices(ctx: ExprSlicesContext): (Option[Extract], Option[MemoryLoad]) = { val slices = ctx.slices.slice().asScala if (slices.size != 1) { // need to determine the semantics for this case throw Exception(s"currently unable to handle Expr_Slices that contains more than one slice: ${ctx.getText}") } val (hi, lo) = visitSliceContext(slices.head) - val expr = visitExpr(ctx.expr) + val (expr, load) = visitExpr(ctx.expr) if (expr.isDefined) { - Some(Extract(hi, lo, expr.get)) + (Some(Extract(hi, lo, expr.get)), load) } else { - None + (None, None) } } @@ -524,7 +564,7 @@ class SemanticsLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]] } } - private def visitExprField(ctx: ExprFieldContext): Register = { + private def visitExprField(ctx: ExprFieldContext): Register = { val name = ctx.expr match { case e: ExprVarContext => visitIdent(e.ident) case _ => throw Exception(s"expected ${ctx.getText} to have an Expr_Var as first parameter") @@ -534,7 +574,7 @@ class SemanticsLoader(parserMap: immutable.Map[String, Array[Array[StmtContext]] resolveFieldExpr(name, field) } - private def visitExprArray(ctx: ExprArrayContext): Register = { + private def visitExprArray(ctx: ExprArrayContext): Register = { val name = ctx.array match { case e: ExprVarContext => visitIdent(e.ident) case _ => throw Exception(s"expected ${ctx.getText} to have an Expr_Var as first parameter") diff --git a/src/main/scala/translating/GTIRBToIR.scala b/src/main/scala/translating/GTIRBToIR.scala index bf8a26ad9..31049706c 100644 --- a/src/main/scala/translating/GTIRBToIR.scala +++ b/src/main/scala/translating/GTIRBToIR.scala @@ -7,7 +7,7 @@ import com.grammatech.gtirb.proto.CFG.Edge import com.grammatech.gtirb.proto.CFG.EdgeLabel import com.grammatech.gtirb.proto.Module.Module import com.grammatech.gtirb.proto.Symbol.Symbol -import Parsers.SemanticsParser.* +import Parsers.ASLpParser.* import gtirb.* import ir.* @@ -156,7 +156,7 @@ class GTIRBToIR(mods: Seq[Module], parserMap: immutable.Map[String, Array[Array[ // maybe good to sort blocks by address around here? 
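// Editorial sketch, not part of the patch: what the renamed GTIRBLoader now yields for a
// single sliced memory read such as Mem.read.0(R1_val, 8)[31:0]. The load is emitted as
// its own MemoryLoad statement targeting a fresh temporary, and the Extract over that
// temporary is returned as a plain expression for the consuming statement to use.
// Identifiers ("$load0", "R1_val") are hypothetical; assumes ir.* is in scope.
val addr = LocalVar("R1_val", BitVecType(64))
val tmp = LocalVar("$load0", BitVecType(64))
val memLoad = MemoryLoad(tmp, SharedMemory("mem", 64, 8), addr, Endian.LittleEndian, 64, None)
val low32 = Extract(32, 0, tmp) // 32-bit slice of the loaded value, used after memLoad is appended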
- val semanticsLoader = SemanticsLoader(parserMap) + val semanticsLoader = GTIRBLoader(parserMap) for ((functionUUID, blockUUIDs) <- functionBlocks) { val procedure = uuidToProcedure(functionUUID) @@ -228,7 +228,7 @@ class GTIRBToIR(mods: Seq[Module], parserMap: immutable.Map[String, Array[Array[ private def removePCAssign(block: Block): Option[String] = { block.statements.last match { - case last @ Assign(lhs: Register, _, _) if lhs.name == "_PC" => + case last @ LocalAssign(lhs: Register, _, _) if lhs.name == "_PC" => val label = last.label block.statements.remove(last) label @@ -238,7 +238,7 @@ class GTIRBToIR(mods: Seq[Module], parserMap: immutable.Map[String, Array[Array[ private def getPCTarget(block: Block): Register = { block.statements.last match { - case Assign(lhs: Register, rhs: Register, _) if lhs.name == "_PC" => rhs + case LocalAssign(lhs: Register, rhs: Register, _) if lhs.name == "_PC" => rhs case _ => throw Exception(s"expected block ${block.label} to have a program counter assignment at its end") } } @@ -373,8 +373,8 @@ class GTIRBToIR(mods: Seq[Module], parserMap: immutable.Map[String, Array[Array[ // need to copy jump as it can't have multiple parents val jumpCopy = currentBlock.jump match { case GoTo(targets, label) => GoTo(targets, label) - case h: Unreachable => Unreachable() - case r: Return => Return() + case Unreachable(label) => Unreachable(label) + case Return(label) => Return(label) case _ => throw Exception("this shouldn't be reachable") } trueBlock.replaceJump(currentBlock.jump) @@ -397,7 +397,7 @@ class GTIRBToIR(mods: Seq[Module], parserMap: immutable.Map[String, Array[Array[ if (proxySymbols.isEmpty) { // indirect call with no further information val target = block.statements.last match { - case Assign(lhs: Register, rhs: Register, _) if lhs.name == "_PC" => rhs + case LocalAssign(lhs: Register, rhs: Register, _) if lhs.name == "_PC" => rhs case _ => throw Exception(s"no assignment to program counter found before indirect call in block ${block.label}") } val label = block.statements.last.label diff --git a/src/main/scala/translating/ILtoIL.scala b/src/main/scala/translating/ILtoIL.scala index 856b18934..9e17aee9e 100644 --- a/src/main/scala/translating/ILtoIL.scala +++ b/src/main/scala/translating/ILtoIL.scala @@ -1,5 +1,5 @@ package translating -import ir._ +import ir.* private class ILSerialiser extends ReadOnlyVisitor { var program: StringBuilder = StringBuilder() @@ -32,7 +32,7 @@ private class ILSerialiser extends ReadOnlyVisitor { override def visitStatement(node: Statement): Statement = node.acceptVisit(this) - override def visitAssign(node: Assign): Statement = { + override def visitLocalAssign(node: LocalAssign): Statement = { program ++= "LocalAssign(" visitVariable(node.lhs) program ++= " := " @@ -41,8 +41,8 @@ private class ILSerialiser extends ReadOnlyVisitor { node } - override def visitMemoryAssign(node: MemoryAssign): Statement = { - program ++= "MemoryAssign(" + override def visitMemoryStore(node: MemoryStore): Statement = { + program ++= "MemoryStore(" visitMemory(node.mem) program ++= "[" visitExpr(node.index) @@ -53,6 +53,17 @@ private class ILSerialiser extends ReadOnlyVisitor { node } + override def visitMemoryLoad(node: MemoryLoad): Statement = { + program ++= "MemoryLoad(" + visitVariable(node.lhs) + program ++= " := " + visitMemory(node.mem) + program ++= ", [" + visitExpr(node.index) + program ++= "])" + node + } + override def visitAssert(node: Assert): Statement = { program ++= "Assert(" visitExpr(node.body) @@ -63,14 +74,13 @@ 
private class ILSerialiser extends ReadOnlyVisitor { override def visitJump(node: Jump): Jump = { node match { case j: GoTo => program ++= s"goTo(${j.targets.map(_.label).mkString(", ")})" - case h: Unreachable => program ++= "halt" - case h: Return => program ++= "return" + case _: Unreachable => program ++= "halt" + case _: Return => program ++= "return" } node } - override def visitGoTo(node: GoTo): GoTo = { program ++= "GoTo(" program ++= node.targets.map(blockIdentifier).mkString(", ") @@ -78,7 +88,6 @@ private class ILSerialiser extends ReadOnlyVisitor { node } - override def visitDirectCall(node: DirectCall): Statement = { program ++= "DirectCall(" program ++= procedureIdentifier(node.target) @@ -213,15 +222,6 @@ private class ILSerialiser extends ReadOnlyVisitor { node } - override def visitMemoryLoad(node: MemoryLoad): Expr = { - program ++= "MemoryLoad(" - visitMemory(node.mem) - program ++= ", [" - visitExpr(node.index) - program ++= "])" - node - } - override def visitMemory(node: Memory): Memory = { program ++= "Memory(" program ++= s"\"${node.name}\", ${node.addressSize}, ${node.valueSize})" diff --git a/src/main/scala/translating/IRToBoogie.scala b/src/main/scala/translating/IRToBoogie.scala index 1c917d038..ec9049334 100644 --- a/src/main/scala/translating/IRToBoogie.scala +++ b/src/main/scala/translating/IRToBoogie.scala @@ -733,15 +733,15 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti def translate(s: Statement): List[BCmd] = s match { case d: Call => translate(d) case _: NOP => List.empty - case m: MemoryAssign => + case m: MemoryStore => val lhs = m.mem.toBoogie val rhs = BMemoryStore(m.mem.toBoogie, m.index.toBoogie, m.value.toBoogie, m.endian, m.size) val lhsGamma = m.mem.toGamma val rhsGamma = GammaStore(m.mem.toGamma, m.index.toBoogie, exprToGamma(m.value), m.size, m.size / m.mem.valueSize) val store = AssignCmd(List(lhs, lhsGamma), List(rhs, rhsGamma)) val stateSplit = s match { - case MemoryAssign(_, _, _, _, _, Some(label)) => List(captureStateStatement(s"$label")) - case Assign(_, _, Some(label)) => List(captureStateStatement(s"$label")) + case MemoryStore(_, _, _, _, _, Some(label)) => List(captureStateStatement(s"$label")) + case LocalAssign(_, _, Some(label)) => List(captureStateStatement(s"$label")) case _ => List.empty } m.mem match { @@ -801,22 +801,29 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti } (List(rely, gammaValueCheck) ++ oldAssigns ++ oldGammaAssigns :+ store) ++ secureUpdate ++ guaranteeChecks ++ stateSplit } - case l: Assign => + case l: LocalAssign => val lhs = l.lhs.toBoogie val rhs = l.rhs.toBoogie val lhsGamma = l.lhs.toGamma val rhsGamma = exprToGamma(l.rhs) - val assign = AssignCmd(List(lhs, lhsGamma), List(rhs, rhsGamma)) - val loads = l.rhs.loads - if (loads.size > 1) { - throw Exception(s"$l contains multiple loads") + List(AssignCmd(List(lhs, lhsGamma), List(rhs, rhsGamma))) + case m: MemoryLoad => + val lhs = m.lhs.toBoogie + val lhsGamma = m.lhs.toGamma + val rhs = BMemoryLoad(m.mem.toBoogie, m.index.toBoogie, m.endian, m.size) + val rhsGamma = m.mem match { + case s: StackMemory => + GammaLoad(s.toGamma, m.index.toBoogie, m.size, m.size / s.valueSize) + case s: SharedMemory => + val boogieIndex = m.index.toBoogie + BinaryBExpr(BoolOR, GammaLoad(s.toGamma, boogieIndex, m.size, m.size / s.valueSize), L(LArgs, boogieIndex)) } - // add rely call if assignment contains a non-stack load - loads.headOption match { - case Some(MemoryLoad(SharedMemory(_, _, _), _, _, 
_)) => + val assign = AssignCmd(List(lhs, lhsGamma), List(rhs, rhsGamma)) + // add rely call if it is a non-stack load + m.mem match { + case _: SharedMemory => List(BProcedureCall("rely"), assign) case _ => - // load is a stack load or doesn't exist List(assign) } case a: Assert => @@ -828,7 +835,7 @@ class IRToBoogie(var program: Program, var spec: Specification, var thread: Opti } def exprToGamma(e: Expr): BExpr = { - val gammaVars: Set[BExpr] = e.gammas.map(_.toGamma) ++ e.loads.map(_.toGamma(LArgs)) + val gammaVars: Set[BExpr] = e.gammas.map(_.toGamma) if (gammaVars.isEmpty) { TrueBLiteral } else if (gammaVars.size == 1) { diff --git a/src/main/scala/util/RunUtils.scala b/src/main/scala/util/RunUtils.scala index 0dec78187..24b5c9741 100644 --- a/src/main/scala/util/RunUtils.scala +++ b/src/main/scala/util/RunUtils.scala @@ -17,7 +17,7 @@ import ir.* import boogie.* import specification.* import Parsers.* -import Parsers.SemanticsParser.* +import Parsers.ASLpParser.* import analysis.data_structure_analysis.{DataStructureAnalysis, Graph, SymbolicAddress, SymbolicAddressAnalysis} import org.antlr.v4.runtime.tree.ParseTreeWalker import org.antlr.v4.runtime.BailErrorStrategy @@ -52,21 +52,21 @@ case class IRContext( /** Stores the results of the static analyses. */ case class StaticAnalysisContext( - constPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - IRconstPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], - memoryRegionResult: Map[CFGPosition, Set[StackRegion]], - vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]], - interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]], - paramResults: Map[Procedure, Set[Variable]], - steensgaardResults: Map[RegisterWrapperEqualSets, Set[RegisterWrapperEqualSets | MemoryRegion]], - mmmResults: MemoryModelMap, - reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], - varDepsSummaries: Map[Procedure, Map[Taintable, Set[Taintable]]], - regionInjector: Option[RegionInjector], - symbolicAddresses: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], - localDSA: Map[Procedure, Graph], - bottomUpDSA: Map[Procedure, Graph], - topDownDSA: Map[Procedure, Graph] + intraProcConstProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + interProcConstProp: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]], + memoryRegionResult: Map[CFGPosition, Set[StackRegion]], + vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]], + interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]], + paramResults: Map[Procedure, Set[Variable]], + steensgaardResults: Map[RegisterWrapperEqualSets, Set[RegisterWrapperEqualSets | MemoryRegion]], + mmmResults: MemoryModelMap, + reachingDefs: Map[CFGPosition, (Map[Variable, Set[Assign]], Map[Variable, Set[Assign]])], + varDepsSummaries: Map[Procedure, Map[Taintable, Set[Taintable]]], + regionInjector: Option[RegionInjector], + symbolicAddresses: Map[CFGPosition, Map[SymbolicAddress, TwoElement]], + localDSA: Map[Procedure, Graph], + bottomUpDSA: Map[Procedure, Graph], + topDownDSA: Map[Procedure, Graph] ) /** Results of the main program execution.
@@ -123,9 +123,9 @@ object IRLoading { val semantics = mods.map(_.auxData("ast").data.toStringUtf8.parseJson.convertTo[Map[String, Array[Array[String]]]]) def parse_insn(line: String): StmtContext = { - val semanticsLexer = SemanticsLexer(CharStreams.fromString(line)) - val tokens = CommonTokenStream(semanticsLexer) - val parser = SemanticsParser(tokens) + val lexer = ASLpLexer(CharStreams.fromString(line)) + val tokens = CommonTokenStream(lexer) + val parser = ASLpParser(tokens) parser.setErrorHandler(BailErrorStrategy()) parser.setBuildParseTree(true) @@ -342,40 +342,40 @@ object StaticAnalysis { val RNASolver = RNAAnalysisSolver(IRProgram) val RNAResult = RNASolver.analyze() - Logger.debug("[!] Running Constant Propagation") - val constPropSolver = ConstantPropagationSolver(IRProgram) - val constPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] = constPropSolver.analyze() + Logger.debug("[!] Running Inter-procedural Constant Propagation") + val interProcConstProp = InterProcConstantPropagation(IRProgram) + val interProcConstPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] = interProcConstProp.analyze() - config.analysisResultsPath.foreach(s => - writeToFile(printAnalysisResults(IRProgram, constPropResult), s"${s}OGconstprop$iteration.txt") - ) + config.analysisResultsPath.foreach { s => + writeToFile(printAnalysisResults(IRProgram, interProcConstPropResult), s"${s}OGconstprop$iteration.txt") + } Logger.debug("[!] Variable dependency summaries") val scc = stronglyConnectedComponents(CallGraph, List(IRProgram.mainProcedure)) val specGlobalAddresses = ctx.specification.globals.map(s => s.address -> s.name).toMap - val varDepsSummaries = VariableDependencyAnalysis(IRProgram, ctx.specification.globals, specGlobalAddresses, constPropResult, scc).analyze() + val varDepsSummaries = VariableDependencyAnalysis(IRProgram, ctx.specification.globals, specGlobalAddresses, interProcConstPropResult, scc).analyze() - val ilcpsolver = IRSimpleValueAnalysis.Solver(IRProgram) - val newCPResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] = ilcpsolver.analyze() + val intraProcConstProp = IntraProcConstantPropagation(IRProgram) + val intraProcConstPropResult: Map[CFGPosition, Map[Variable, FlatElement[BitVecLiteral]]] = intraProcConstProp.analyze() - config.analysisResultsPath.foreach(s => - writeToFile(printAnalysisResults(IRProgram, newCPResult), s"${s}_new_ir_constprop$iteration.txt") - ) + config.analysisResultsPath.foreach { s => + writeToFile(printAnalysisResults(IRProgram, intraProcConstPropResult), s"${s}_new_ir_constprop$iteration.txt") + } - config.analysisDotPath.foreach(f => { + config.analysisDotPath.foreach { f => val dumpdomain = computeDomain[CFGPosition, CFGPosition](InterProcIRCursor, IRProgram.procedures) writeToFile(toDot(dumpdomain, InterProcIRCursor, Map.empty), s"${f}_new_ir_intercfg$iteration.dot") - }) + } val reachingDefinitionsAnalysisSolver = InterprocReachingDefinitionsAnalysisSolver(IRProgram) val reachingDefinitionsAnalysisResults = reachingDefinitionsAnalysisSolver.analyze() - config.analysisDotPath.foreach(s => { + config.analysisDotPath.foreach { s => writeToFile( toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> reachingDefinitionsAnalysisResults(b).toString).toMap, true), s"${s}_reachingDefinitions$iteration.dot" ) - }) + } val mmm = MemoryModelMap(globalOffsets) mmm.preLoadGlobals(mergedSubroutines, globalAddresses, globalSizes) @@ -387,14 +387,14 @@ object StaticAnalysis { } 
Logger.debug("[!] Running GRA") - val graSolver = GlobalRegionAnalysisSolver(IRProgram, domain.toSet, constPropResult, reachingDefinitionsAnalysisResults, mmm, previousVSAResults) + val graSolver = GlobalRegionAnalysisSolver(IRProgram, domain.toSet, interProcConstPropResult, reachingDefinitionsAnalysisResults, mmm, previousVSAResults) val graResult = graSolver.analyze() Logger.debug("[!] Running MRA") - val mraSolver = MemoryRegionAnalysisSolver(IRProgram, domain.toSet, globalAddresses, globalOffsets, mergedSubroutines, constPropResult, ANRResult, RNAResult, reachingDefinitionsAnalysisResults, graResult, mmm) + val mraSolver = MemoryRegionAnalysisSolver(IRProgram, domain.toSet, globalAddresses, globalOffsets, mergedSubroutines, interProcConstPropResult, ANRResult, RNAResult, reachingDefinitionsAnalysisResults, graResult, mmm) val mraResult = mraSolver.analyze() - config.analysisDotPath.foreach(s => { + config.analysisDotPath.foreach { s => writeToFile(dotCallGraph(IRProgram), s"${s}_callgraph$iteration.dot") writeToFile( dotBlockGraph(IRProgram, IRProgram.filter(_.isInstanceOf[Block]).map(b => b -> b.toString).toMap), @@ -402,7 +402,7 @@ object StaticAnalysis { ) writeToFile( - toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> newCPResult(b).toString).toMap), + toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> intraProcConstPropResult(b).toString).toMap), s"${s}_new_ir_constprop$iteration.dot" ) @@ -415,7 +415,7 @@ object StaticAnalysis { toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> graResult(b).toString).toMap), s"${s}_GRA$iteration.dot" ) - }) + } Logger.debug("[!] Running MMM") mmm.convertMemoryRegions(mraSolver.procedureToStackRegions, mraSolver.procedureToHeapRegions, mraResult, mraSolver.procedureToSharedRegions, graSolver.getDataMap, graResult) @@ -427,15 +427,15 @@ object StaticAnalysis { val steensgaardResults = steensgaardSolver.pointsTo() Logger.debug("[!] Running VSA") - val vsaSolver = ValueSetAnalysisSolver(IRProgram, mmm, constPropResult) + val vsaSolver = ValueSetAnalysisSolver(IRProgram, mmm, interProcConstPropResult) val vsaResult: Map[CFGPosition, LiftedElement[Map[Variable | MemoryRegion, Set[Value]]]] = vsaSolver.analyze() - config.analysisDotPath.foreach(s => { + config.analysisDotPath.foreach { s => writeToFile( toDot(IRProgram, IRProgram.filter(_.isInstanceOf[Command]).map(b => b -> vsaResult(b).toString).toMap), s"${s}_VSA$iteration.dot" ) - }) + } Logger.debug("[!] Injecting regions") val regionInjector = if (config.memoryRegions) { @@ -450,8 +450,8 @@ object StaticAnalysis { val interLiveVarsResults: Map[CFGPosition, Map[Variable, TwoElement]] = InterLiveVarsAnalysis(IRProgram).analyze() StaticAnalysisContext( - constPropResult = constPropResult, - IRconstPropResult = newCPResult, + intraProcConstProp = interProcConstPropResult, + interProcConstProp = intraProcConstPropResult, memoryRegionResult = mraResult, vsaResult = vsaResult, interLiveVarsResults = interLiveVarsResults, @@ -595,7 +595,7 @@ object RunUtils { Logger.debug("[!] Generating Procedure Summaries") if (config.summariseProcedures) { - IRTransform.generateProcedureSummaries(ctx, ctx.program, result.constPropResult, result.varDepsSummaries) + IRTransform.generateProcedureSummaries(ctx, ctx.program, result.intraProcConstProp, result.varDepsSummaries) } if (modified) { @@ -619,7 +619,7 @@ object RunUtils { Logger.debug("[!] 
Running Symbolic Access Analysis") val symResults: Map[CFGPosition, Map[SymbolicAddress, TwoElement]] = - SymbolicAddressAnalysis(ctx.program, analysisResult.last.IRconstPropResult).analyze() + SymbolicAddressAnalysis(ctx.program, analysisResult.last.interProcConstProp).analyze() config.analysisDotPath.foreach { s => val labels = symResults.map { (k, v) => k -> v.toString } writeToFile(toDot(ctx.program, labels), s"${s}_saa.dot") @@ -627,7 +627,7 @@ object RunUtils { Logger.debug("[!] Running DSA Analysis") val symbolTableEntries: Set[SymbolTableEntry] = ctx.globals ++ ctx.funcEntries - val dsa = DataStructureAnalysis(ctx.program, symResults, analysisResult.last.IRconstPropResult, symbolTableEntries, ctx.globalOffsets, ctx.externalFunctions, reachingDefs, writesTo, analysisResult.last.paramResults) + val dsa = DataStructureAnalysis(ctx.program, symResults, analysisResult.last.interProcConstProp, symbolTableEntries, ctx.globalOffsets, ctx.externalFunctions, reachingDefs, writesTo, analysisResult.last.paramResults) dsa.analyze() config.analysisDotPath.foreach { s => diff --git a/src/test/scala/DataStructureAnalysisTest.scala b/src/test/scala/DataStructureAnalysisTest.scala index bdedf8cfe..80d848821 100644 --- a/src/test/scala/DataStructureAnalysisTest.scala +++ b/src/test/scala/DataStructureAnalysisTest.scala @@ -246,15 +246,15 @@ class DataStructureAnalysisTest extends AnyFunSuite { test("internal merge") { // this is an internal merge (two cells of the same node overlap and are merged together) val mem = SharedMemory("mem", 64, 8) - val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) - val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) + val locAssign1 = LocalAssign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) + val locAssign2 = LocalAssign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) val program = prog( proc("main", block("operations", locAssign1, // R6 = R0 + 4 locAssign2, // R7 = R0 + 5 - MemoryAssign(mem, R7, R1, Endian.BigEndian, 64, Some("00003")), // *R7 = R1, (*R6 + 1) = R1 - MemoryAssign(mem, R6, R2, Endian.BigEndian, 64, Some("00004")), // *R6 = R2 + MemoryStore(mem, R7, R1, Endian.BigEndian, 64, Some("00003")), // *R7 = R1, (*R6 + 1) = R1 + MemoryStore(mem, R6, R2, Endian.BigEndian, 64, Some("00004")), // *R6 = R2 ret ) ) @@ -282,17 +282,17 @@ class DataStructureAnalysisTest extends AnyFunSuite { test("offsetting from middle of cell to a new cell") { val mem = SharedMemory("mem", 64, 8) - val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) - val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) - val locAssign3 = Assign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(8, 64)), Some("00005")) + val locAssign1 = LocalAssign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) + val locAssign2 = LocalAssign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) + val locAssign3 = LocalAssign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(8, 64)), Some("00005")) val program = prog( proc("main", block("operations", locAssign1, // R6 = R0 + 4 locAssign2, // R7 = R0 + 5 - MemoryAssign(mem, R7, R1, Endian.BigEndian, 64, Some("00003")), - MemoryAssign(mem, R6, R2, Endian.BigEndian, 64, Some("00004")), + MemoryStore(mem, R7, R1, Endian.BigEndian, 64, Some("00003")), + MemoryStore(mem, R6, R2, Endian.BigEndian, 64, Some("00004")), locAssign3, // R5 = R7 + 8 ret ) @@ -309,17 +309,17 @@ class 
DataStructureAnalysisTest extends AnyFunSuite { // similar to above except instead of creating new cell the last assign // points R5's cell at an internal offset of 8 val mem = SharedMemory("mem", 64, 8) - val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) - val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) - val locAssign3 = Assign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(7, 64)), Some("00005")) + val locAssign1 = LocalAssign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) + val locAssign2 = LocalAssign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) + val locAssign3 = LocalAssign(R5, BinaryExpr(BVADD, R7, BitVecLiteral(7, 64)), Some("00005")) val program = prog( proc("main", block("operations", locAssign1, locAssign2, - MemoryAssign(mem, R7, R1, Endian.BigEndian, 64, Some("00003")), - MemoryAssign(mem, R6, R2, Endian.BigEndian, 64, Some("00004")), + MemoryStore(mem, R7, R1, Endian.BigEndian, 64, Some("00003")), + MemoryStore(mem, R6, R2, Endian.BigEndian, 64, Some("00004")), locAssign3, ret ) @@ -341,9 +341,9 @@ class DataStructureAnalysisTest extends AnyFunSuite { test("internal offset transfer") { // this is a test to check assignments transfer internal offset of slices. val mem = SharedMemory("mem", 64, 8) - val locAssign1 = Assign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) - val locAssign2 = Assign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) - val locAssign3 = Assign(R5, R7, Some("00005")) + val locAssign1 = LocalAssign(R6, BinaryExpr(BVADD, R0, BitVecLiteral(4, 64)), Some("00001")) + val locAssign2 = LocalAssign(R7, BinaryExpr(BVADD, R0, BitVecLiteral(5, 64)), Some("00002")) + val locAssign3 = LocalAssign(R5, R7, Some("00005")) val program = prog( proc("main", @@ -351,8 +351,8 @@ class DataStructureAnalysisTest extends AnyFunSuite { // Assign(R0, MemoryLoad(mem, R0, BigEndian, 0), Some("00000")), locAssign1, locAssign2, - MemoryAssign(mem, R7, R1, Endian.BigEndian, 64, Some("00003")), - MemoryAssign(mem, R6, R2, Endian.BigEndian, 64, Some("00004")), + MemoryStore(mem, R7, R1, Endian.BigEndian, 64, Some("00003")), + MemoryStore(mem, R6, R2, Endian.BigEndian, 64, Some("00004")), locAssign3, ret ) diff --git a/src/test/scala/LiveVarsAnalysisTests.scala b/src/test/scala/LiveVarsAnalysisTests.scala index 762fd395b..bd7a5ed8e 100644 --- a/src/test/scala/LiveVarsAnalysisTests.scala +++ b/src/test/scala/LiveVarsAnalysisTests.scala @@ -1,6 +1,6 @@ import analysis.{InterLiveVarsAnalysis, TwoElementTop} import ir.dsl.* -import ir.{BitVecLiteral, BitVecType, dsl, Assign, LocalVar, Program, Register, Statement, Variable, transforms, cilvisitor, Procedure} +import ir.{BitVecLiteral, BitVecType, dsl, LocalAssign, LocalVar, Program, Register, Statement, Variable, transforms, cilvisitor, Procedure} import util.{Logger, LogLevel} import org.scalatest.funsuite.AnyFunSuite import test_util.BASILTest @@ -30,10 +30,10 @@ class LiveVarsAnalysisTests extends AnyFunSuite, BASILTest { def differentCalleesBothLive(): Unit = { val constant1 = bv64(1) - val r0ConstantAssign = Assign(R0, constant1, Some("00001")) - val r1ConstantAssign = Assign(R1, constant1, Some("00002")) - val r2r0Assign = Assign(R2, R0, Some("00003")) - val r2r1Assign = Assign(R2, R1, Some("00004")) + val r0ConstantAssign = LocalAssign(R0, constant1, Some("00001")) + val r1ConstantAssign = LocalAssign(R1, constant1, Some("00002")) + val r2r0Assign = LocalAssign(R2, R0, Some("00003")) + val 
r2r1Assign = LocalAssign(R2, R1, Some("00004")) val program: Program = prog( proc("main", @@ -70,11 +70,11 @@ class LiveVarsAnalysisTests extends AnyFunSuite, BASILTest { def differentCalleesOneAlive(): Unit = { val constant1 = bv64(1) - val r0ConstantAssign = Assign(R0, constant1, Some("00001")) - val r1ConstantAssign = Assign(R1, constant1, Some("00002")) - val r2r0Assign = Assign(R2, R0, Some("00003")) - val r2r1Assign = Assign(R2, R1, Some("00004")) - val r1Reassign = Assign(R1, BitVecLiteral(2, 64), Some("00005")) + val r0ConstantAssign = LocalAssign(R0, constant1, Some("00001")) + val r1ConstantAssign = LocalAssign(R1, constant1, Some("00002")) + val r2r0Assign = LocalAssign(R2, R0, Some("00003")) + val r2r1Assign = LocalAssign(R2, R1, Some("00004")) + val r1Reassign = LocalAssign(R1, BitVecLiteral(2, 64), Some("00005")) val program: Program = prog( proc("main", @@ -108,9 +108,9 @@ class LiveVarsAnalysisTests extends AnyFunSuite, BASILTest { def twoCallers(): Unit = { val constant1 = bv64(1) - val r0ConstantAssign = Assign(R0, constant1, Some("00001")) - val r1Assign = Assign(R0, R1, Some("00002")) - val r2Assign = Assign(R0, R2, Some("00003")) + val r0ConstantAssign = LocalAssign(R0, constant1, Some("00001")) + val r1Assign = LocalAssign(R0, R1, Some("00002")) + val r2Assign = LocalAssign(R0, R2, Some("00003")) val program = prog( proc("main", @@ -129,7 +129,7 @@ class LiveVarsAnalysisTests extends AnyFunSuite, BASILTest { createSimpleProc("callee3", Seq(r2Assign)), proc("wrapper1", block("wrapper1_first_call", - Assign(R1, constant1), + LocalAssign(R1, constant1), directCall("callee"), goto("wrapper1_second_call") ), @@ -140,7 +140,7 @@ class LiveVarsAnalysisTests extends AnyFunSuite, BASILTest { ), proc("wrapper2", block("wrapper2_first_call", - Assign(R2, constant1), + LocalAssign(R2, constant1), directCall("callee"), goto("wrapper2_second_call") ), block("wrapper2_second_call", @@ -167,11 +167,11 @@ class LiveVarsAnalysisTests extends AnyFunSuite, BASILTest { directCall("killer"), goto("aftercall") ), block("aftercall", - Assign(R0, R1), + LocalAssign(R0, R1), ret ) ), - createSimpleProc("killer", Seq(Assign(R1, bv64(1)))) + createSimpleProc("killer", Seq(LocalAssign(R1, bv64(1)))) ) cilvisitor.visit_prog(transforms.ReplaceReturns(), program) @@ -186,8 +186,8 @@ class LiveVarsAnalysisTests extends AnyFunSuite, BASILTest { } def simpleBranch(): Unit = { - val r1Assign = Assign(R0, R1, Some("00001")) - val r2Assign = Assign(R0, R2, Some("00002")) + val r1Assign = LocalAssign(R0, R1, Some("00001")) + val r2Assign = LocalAssign(R0, R2, Some("00002")) val program : Program = prog( proc( @@ -228,11 +228,11 @@ class LiveVarsAnalysisTests extends AnyFunSuite, BASILTest { proc("main", block( "lmain", - Assign(R0, R1), + LocalAssign(R0, R1), directCall("main"), goto("return") ), block("return", - Assign(R0, R2), + LocalAssign(R0, R2), ret ) ) @@ -251,7 +251,7 @@ class LiveVarsAnalysisTests extends AnyFunSuite, BASILTest { val program: Program = prog( proc("main", block("lmain", - Assign(R0, R1), + LocalAssign(R0, R1), goto("recursion", "non-recursion") ), block( @@ -259,7 +259,7 @@ class LiveVarsAnalysisTests extends AnyFunSuite, BASILTest { directCall("main"), goto("assign") ), block("assign", - Assign(R0, R2), + LocalAssign(R0, R2), goto("return") ), block( diff --git a/src/test/scala/PointsToTest.scala b/src/test/scala/PointsToTest.scala index d4b148d41..4b267d7e8 100644 --- a/src/test/scala/PointsToTest.scala +++ b/src/test/scala/PointsToTest.scala @@ -30,11 +30,11 @@ class 
PointsToTest extends AnyFunSuite with OneInstancePerTest { var program: Program = prog( proc("main", block("0x0", - Assign(R6, R31), + LocalAssign(R6, R31), goto("0x1") ), block("0x1", - MemoryAssign(mem, BinaryExpr(BVADD, R6, bv64(4)), bv64(10), LittleEndian, 64), + MemoryStore(mem, BinaryExpr(BVADD, R6, bv64(4)), bv64(10), LittleEndian, 64), goto("returntarget") ), block("returntarget", @@ -60,8 +60,8 @@ class PointsToTest extends AnyFunSuite with OneInstancePerTest { var program: Program = prog( proc("main", block("0x0", - Assign(R1, MemoryLoad(mem, BinaryExpr(BVADD, R31, bv64(6)), LittleEndian, 64)), - Assign(R3, MemoryLoad(mem, BinaryExpr(BVADD, R31, bv64(4)), LittleEndian, 64)), + MemoryLoad(R1, mem, BinaryExpr(BVADD, R31, bv64(6)), LittleEndian, 64), + MemoryLoad(R3, mem, BinaryExpr(BVADD, R31, bv64(4)), LittleEndian, 64), goto("0x1") ), block("0x1", @@ -135,8 +135,8 @@ class PointsToTest extends AnyFunSuite with OneInstancePerTest { val program: Program = prog( proc("main", block("0x0", - Assign(R0, MemoryLoad(mem, BinaryExpr(BVADD, R31, bv64(6)), LittleEndian, 64)), - Assign(R1, BinaryExpr(BVADD, R31, bv64(10))), + MemoryLoad(R0, mem, BinaryExpr(BVADD, R31, bv64(6)), LittleEndian, 64), + LocalAssign(R1, BinaryExpr(BVADD, R31, bv64(10))), goto("0x1") ), block("0x1", @@ -148,8 +148,8 @@ class PointsToTest extends AnyFunSuite with OneInstancePerTest { ), proc("p2", block("l_p2", - Assign(R3, R0), - Assign(R2, MemoryLoad(mem, R1, LittleEndian, 64)), + LocalAssign(R3, R0), + MemoryLoad(R2, mem, R1, LittleEndian, 64), goto("l_p2_1"), ), block("l_p2_1", @@ -184,8 +184,8 @@ class PointsToTest extends AnyFunSuite with OneInstancePerTest { val program: Program = prog( proc("main", block("0x0", - Assign(R0, MemoryLoad(mem, BinaryExpr(BVADD, R31, bv64(6)), LittleEndian, 64)), - Assign(R1, BinaryExpr(BVADD, R31, bv64(10))), + MemoryLoad(R0, mem, BinaryExpr(BVADD, R31, bv64(6)), LittleEndian, 64), + LocalAssign(R1, BinaryExpr(BVADD, R31, bv64(10))), goto("0x1") ), block("0x1", @@ -197,8 +197,8 @@ class PointsToTest extends AnyFunSuite with OneInstancePerTest { ), proc("foo", block("l_foo", - Assign(R0, MemoryLoad(mem, BinaryExpr(BVADD, R31, bv64(6)), LittleEndian, 64)), - Assign(R1, BinaryExpr(BVADD, R31, bv64(10))), + MemoryLoad(R0, mem, BinaryExpr(BVADD, R31, bv64(6)), LittleEndian, 64), + LocalAssign(R1, BinaryExpr(BVADD, R31, bv64(10))), directCall("p2"), goto("l_foo_1") ), block("l_foo_1", @@ -207,8 +207,8 @@ class PointsToTest extends AnyFunSuite with OneInstancePerTest { ), proc("p2", block("l_p2", - Assign(R3, R0), - Assign(R2, MemoryLoad(mem, R1, LittleEndian, 64)), + LocalAssign(R3, R0), + MemoryLoad(R2, mem, R1, LittleEndian, 64), goto("l_p2_1"), ), block("l_p2_1", diff --git a/src/test/scala/TaintAnalysisTests.scala b/src/test/scala/TaintAnalysisTests.scala index 35e7da1ab..2ffeb3215 100644 --- a/src/test/scala/TaintAnalysisTests.scala +++ b/src/test/scala/TaintAnalysisTests.scala @@ -7,12 +7,12 @@ import test_util.BASILTest class TaintAnalysisTests extends AnyFunSuite, BASILTest { def getTaintAnalysisResults(program: Program, taint: Map[CFGPosition, Set[Taintable]]): Map[CFGPosition, Set[Taintable]] = { - val constPropResults = ConstantPropagationSolver(program).analyze() + val constPropResults = InterProcConstantPropagation(program).analyze() TaintAnalysis(program, Map(), constPropResults, taint).analyze().map { (c, m) => (c, m.map { (v, _) => v }.toSet)} } def getVarDepResults(program: Program, procedure: Procedure): Map[CFGPosition, Map[Taintable, Set[Taintable]]] = { - val 
constPropResults = ConstantPropagationSolver(program).analyze() + val constPropResults = InterProcConstantPropagation(program).analyze() val variables = registers ProcVariableDependencyAnalysis(program, variables, Map(), constPropResults, Map(), procedure).analyze() } @@ -31,7 +31,7 @@ class TaintAnalysisTests extends AnyFunSuite, BASILTest { ), proc("f", block("assign", - Assign(R0, bv64(2), None), + LocalAssign(R0, bv64(2), None), goto("returnBlock"), ), block("returnBlock", @@ -65,7 +65,7 @@ class TaintAnalysisTests extends AnyFunSuite, BASILTest { ), proc("f", block("assign", - Assign(R0, BinaryExpr(BVADD, R0, R1), None), + LocalAssign(R0, BinaryExpr(BVADD, R0, R1), None), goto("returnBlock"), ), block("returnBlock", @@ -102,11 +102,11 @@ class TaintAnalysisTests extends AnyFunSuite, BASILTest { goto("a", "b"), ), block("a", - Assign(R0, R1, None), + LocalAssign(R0, R1, None), goto("returnBlock"), ), block("b", - Assign(R0, R2, None), + LocalAssign(R0, R2, None), goto("returnBlock"), ), block("returnBlock", @@ -143,12 +143,12 @@ class TaintAnalysisTests extends AnyFunSuite, BASILTest { goto("a", "b"), ), block("a", - Assign(R1, R1, None), + LocalAssign(R1, R1, None), directCall("g"), goto("returnBlock"), ), block("b", - Assign(R1, R2, None), + LocalAssign(R1, R2, None), directCall("g"), goto("returnBlock"), ), @@ -158,7 +158,7 @@ class TaintAnalysisTests extends AnyFunSuite, BASILTest { ), proc("g", block("body", - Assign(R0, R1, None), + LocalAssign(R0, R1, None), goto("returnBlock"), ), block("returnBlock", @@ -195,11 +195,11 @@ class TaintAnalysisTests extends AnyFunSuite, BASILTest { goto("a", "b"), ), block("a", - Assign(R0, BinaryExpr(BVADD, R0, R1), None), + LocalAssign(R0, BinaryExpr(BVADD, R0, R1), None), goto("branch"), ), block("b", - Assign(R0, R2, None), + LocalAssign(R0, R2, None), goto("returnBlock"), ), block("returnBlock", diff --git a/src/test/scala/ir/CILVisitorTest.scala b/src/test/scala/ir/CILVisitorTest.scala index f06528cbf..49eae1cb5 100644 --- a/src/test/scala/ir/CILVisitorTest.scala +++ b/src/test/scala/ir/CILVisitorTest.scala @@ -40,7 +40,7 @@ class AddGammas extends CILVisitor { override def vstmt(s: Statement) = { s match { - case a: Assign => ChangeTo(List(a, Assign(gamma_v(a.lhs), gamma_e(a.rhs)))) + case a: LocalAssign => ChangeTo(List(a, LocalAssign(gamma_v(a.lhs), gamma_e(a.rhs)))) case _ => SkipChildren() } @@ -82,10 +82,10 @@ class CILVisitorTest extends AnyFunSuite { val program: Program = prog( proc( "main", - block("0x0", Assign(getRegister("R6"), getRegister("R31")), goto("0x1")), + block("0x0", LocalAssign(getRegister("R6"), getRegister("R31")), goto("0x1")), block( "0x1", - MemoryAssign(mem, BinaryExpr(BVADD, getRegister("R6"), bv64(4)), bv64(10), Endian.LittleEndian, 64), + MemoryStore(mem, BinaryExpr(BVADD, getRegister("R6"), bv64(4)), bv64(10), Endian.LittleEndian, 64), goto("returntarget") ), block("returntarget", ret) @@ -123,10 +123,10 @@ class CILVisitorTest extends AnyFunSuite { val program: Program = prog( proc( "main", - block("0x0", Assign(getRegister("R6"), getRegister("R31")), goto("0x1")), + block("0x0", LocalAssign(getRegister("R6"), getRegister("R31")), goto("0x1")), block( "0x1", - MemoryAssign(mem, BinaryExpr(BVADD, getRegister("R6"), bv64(4)), bv64(10), Endian.LittleEndian, 64), + MemoryStore(mem, BinaryExpr(BVADD, getRegister("R6"), bv64(4)), bv64(10), Endian.LittleEndian, 64), goto("returntarget") ), block("returntarget", ret) diff --git a/src/test/scala/ir/IRTest.scala b/src/test/scala/ir/IRTest.scala index 
a1592f9eb..f156a83c0 100644 --- a/src/test/scala/ir/IRTest.scala +++ b/src/test/scala/ir/IRTest.scala @@ -95,12 +95,12 @@ class IRTest extends AnyFunSuite { val p = prog( proc("main", block("l_main", - Assign(R0, bv64(10)), - Assign(R1, bv64(10)), + LocalAssign(R0, bv64(10)), + LocalAssign(R1, bv64(10)), goto("newblock") ), block("l_main_1", - Assign(R0, bv64(22)), + LocalAssign(R0, bv64(22)), directCall("p2"), goto("returntarget") ), @@ -109,7 +109,7 @@ class IRTest extends AnyFunSuite { ) ), proc("p2", - block("l_p2", Assign(R0, bv64(10)), goto("l_p2_1")), + block("l_p2", LocalAssign(R0, bv64(10)), goto("l_p2_1")), block("l_p2_1", ret) ) ) @@ -154,15 +154,15 @@ class IRTest extends AnyFunSuite { ) val b2 = block("newblock2", - Assign(R0, bv64(22)), - Assign(R0, bv64(22)), - Assign(R0, bv64(22)), + LocalAssign(R0, bv64(22)), + LocalAssign(R0, bv64(22)), + LocalAssign(R0, bv64(22)), goto("lmain2") ).resolve(p) val b1 = block("newblock1", - Assign(R0, bv64(22)), - Assign(R0, bv64(22)), - Assign(R0, bv64(22)), + LocalAssign(R0, bv64(22)), + LocalAssign(R0, bv64(22)), + LocalAssign(R0, bv64(22)), goto("lmain2") ).resolve(p) @@ -191,16 +191,16 @@ class IRTest extends AnyFunSuite { ) val b1 = block("newblock2", - Assign(R0, bv64(22)), - Assign(R0, bv64(22)), - Assign(R0, bv64(22)), + LocalAssign(R0, bv64(22)), + LocalAssign(R0, bv64(22)), + LocalAssign(R0, bv64(22)), directCall("main"), unreachable ).resolve(p) val b2 = block("newblock1", - Assign(R0, bv64(22)), - Assign(R0, bv64(22)), - Assign(R0, bv64(22)), + LocalAssign(R0, bv64(22)), + LocalAssign(R0, bv64(22)), + LocalAssign(R0, bv64(22)), ret ).resolve(p) @@ -218,7 +218,7 @@ class IRTest extends AnyFunSuite { assert(called.incomingCalls().isEmpty) val b3 = block("newblock3", - Assign(R0, bv64(22)), + LocalAssign(R0, bv64(22)), directCall("called"), unreachable ).resolve(p) @@ -254,8 +254,8 @@ class IRTest extends AnyFunSuite { val p = prog( proc("main", block("l_main", - Assign(R0, bv64(10)), - Assign(R1, bv64(10)), + LocalAssign(R0, bv64(10)), + LocalAssign(R1, bv64(10)), goto("returntarget") ), block("returntarget", @@ -285,13 +285,13 @@ class IRTest extends AnyFunSuite { val p = prog( proc("p1", block("b1", - Assign(R0, bv64(10)), + LocalAssign(R0, bv64(10)), ret ) ), proc("main", block("l_main", - Assign(R0, bv64(10)), + LocalAssign(R0, bv64(10)), directCall("p1"), goto("returntarget") ), block("returntarget", diff --git a/src/test/scala/ir/SingleCallInvariant.scala b/src/test/scala/ir/SingleCallInvariant.scala index d8efb6fc2..4e0061424 100644 --- a/src/test/scala/ir/SingleCallInvariant.scala +++ b/src/test/scala/ir/SingleCallInvariant.scala @@ -10,13 +10,13 @@ class InvariantTest extends AnyFunSuite { var program: Program = prog( proc("main", block("first_call", - Assign(R0, bv64(10)), - Assign(R1, bv64(10)), + LocalAssign(R0, bv64(10)), + LocalAssign(R1, bv64(10)), directCall("callee1"), ret ), block("second_call", - Assign(R0, bv64(10)), + LocalAssign(R0, bv64(10)), directCall("callee2"), ret ), @@ -35,14 +35,14 @@ class InvariantTest extends AnyFunSuite { var program: Program = prog( proc("main", block("first_call", - Assign(R0, bv64(10)), + LocalAssign(R0, bv64(10)), directCall("callee2"), - Assign(R1, bv64(10)), + LocalAssign(R1, bv64(10)), directCall("callee1"), ret ), block("second_call", - Assign(R0, bv64(10)), + LocalAssign(R0, bv64(10)), ret ), block("returnBlock", @@ -60,13 +60,13 @@ class InvariantTest extends AnyFunSuite { var program: Program = prog( proc("main", block("first_call", - Assign(R0, bv64(10)), - Assign(R1, 
bv64(10)), + LocalAssign(R0, bv64(10)), + LocalAssign(R1, bv64(10)), ret ), block("second_call", directCall("callee2"), - Assign(R0, bv64(10)), + LocalAssign(R0, bv64(10)), ret ), block("returnBlock",