diff --git a/.github/workflows/update-submodules.yml b/.github/workflows/update-submodules.yml new file mode 100644 index 000000000..bf72e2513 --- /dev/null +++ b/.github/workflows/update-submodules.yml @@ -0,0 +1,65 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +# +# Copyright (c) 2011-2022 ETH Zurich. + +name: Update Submodules + +on: + workflow_dispatch: # allow to manually trigger this workflow + schedule: + - cron: '0 6 * * *' # run every day at 06:00 UTC + +jobs: + # Update silver and create a PR if there are any changes + update: + runs-on: ubuntu-latest + steps: + - name: Check out the repo + uses: actions/checkout@v3 + with: + submodules: true + + - name: Get current commits + run: echo "PREV_SILVER_REF=$(git -C silver rev-parse HEAD)" >> $GITHUB_ENV + + - name: Update Silver submodule + run: git checkout master && git pull + working-directory: silver + + - name: Get new commits + run: echo "CUR_SILVER_REF=$(git -C silver rev-parse HEAD)" >> $GITHUB_ENV + + - name: Create PR body + run: | + if [[ "${{ env.PREV_SILVER_REF }}" != "${{ env.CUR_SILVER_REF }}" ]]; then + echo 'PR_BODY_LINE=* Updates Silver from `${{ env.PREV_SILVER_REF }}` to `${{ env.CUR_SILVER_REF }}`.' >> $GITHUB_ENV + else + echo 'PR_BODY_LINE=' >> $GITHUB_ENV + fi + + - name: Open a pull request + id: pr + uses: peter-evans/create-pull-request@v4 + if: (env.PREV_SILVER_REF != env.CUR_SILVER_REF) + with: + # Use viper-admin's token to workaround a restriction of GitHub. 
+ # See: https://github.com/peter-evans/create-pull-request/issues/48 + token: ${{ secrets.UPDATE_SILVER }} + commit-message: Updates submodules + title: Update Submodules + branch: auto-update-submodules + delete-branch: true + labels: | + automated pr + body: | + ${{ env.PR_BODY_LINE }} + + - name: Enable auto-merge of PR + uses: peter-evans/create-or-update-comment@v2 + if: (env.PREV_SILVER_REF != env.CUR_SILVER_REF) + with: + token: ${{ secrets.UPDATE_SILVER }} + issue-number: ${{ steps.pr.outputs.pull-request-number }} + body: bors merge diff --git a/silver b/silver index 820db76e9..31c94df4f 160000 --- a/silver +++ b/silver @@ -1 +1 @@ -Subproject commit 820db76e92646c57b180c7ccdc8dee0465118e62 +Subproject commit 31c94df4f9792046618d9b4db52444ffe9c7c988 diff --git a/src/main/scala/Config.scala b/src/main/scala/Config.scala index 068bcc03e..871f42d5e 100644 --- a/src/main/scala/Config.scala +++ b/src/main/scala/Config.scala @@ -656,6 +656,12 @@ class Config(args: Seq[String]) extends SilFrontendConfig(args, "Silicon") { noshort = true ) + val moreJoins: ScallopOption[Boolean] = opt[Boolean]("moreJoins", + descr = "Enable more joins using a more complete implementation of state merging.", + default = Some(false), + noshort = true + ) + val exhaleModeOption: ScallopOption[ExhaleMode] = opt[ExhaleMode]("exhaleMode", descr = "Exhale mode. Options are 0 (greedy, default), 1 (more complete exhale), 2 (more complete exhale on demand).", default = None, @@ -684,8 +690,10 @@ class Config(args: Seq[String]) extends SilFrontendConfig(args, "Silicon") { )(singleArgConverter(mode => StateConsolidationMode(mode.toInt))) val numberOfParallelVerifiers: ScallopOption[Int] = opt[Int]("numberOfParallelVerifiers", - descr = ( "Number of verifiers run in parallel. 
This number plus one is the number of provers " - + s"run in parallel (default: ${Runtime.getRuntime.availableProcessors()})"), + descr = ( "Number of verifiers (and therefore also prover instances) run in parallel for method verification. " + + "A value of 1 leads to sequential method verification. " + + "Functions and predicates are always verified sequentially on a separate prover instance. " + + s"Default: ${Runtime.getRuntime.availableProcessors()}"), default = Some(Runtime.getRuntime.availableProcessors()), noshort = true ) @@ -815,6 +823,11 @@ class Config(args: Seq[String]) extends SilFrontendConfig(args, "Silicon") { case _ => Right(()) } + validateOpt(numberOfParallelVerifiers) { + case Some(n) if n <= 0 => Left(s"Number of parallel verifiers must be positive, but $n was provided") + case _ => Right(()) + } + validateFileOpt(logConfig) validateFileOpt(setAxiomatizationFile) validateFileOpt(multisetAxiomatizationFile) diff --git a/src/main/scala/decider/Decider.scala b/src/main/scala/decider/Decider.scala index 785477bfd..f8f0f8e7c 100644 --- a/src/main/scala/decider/Decider.scala +++ b/src/main/scala/decider/Decider.scala @@ -22,6 +22,7 @@ import viper.silicon.verifier.{Verifier, VerifierComponent} import viper.silver.reporter.{ConfigurationConfirmation, InternalWarningMessage} import scala.collection.immutable.HashSet +import scala.collection.mutable /* * Interfaces */ @@ -40,7 +41,8 @@ trait Decider { def setPathConditionMark(): Mark def assume(t: Term): Unit - def assume(ts: InsertionOrderedSet[Term], enforceAssumption: Boolean = false): Unit + def assumeDefinition(t: Term): Unit + def assume(ts: InsertionOrderedSet[Term], enforceAssumption: Boolean = false, isDefinition: Boolean = false): Unit def assume(ts: Iterable[Term]): Unit def check(t: Term, timeout: Int): Boolean @@ -72,8 +74,10 @@ trait Decider { // slower, so this tradeoff seems worth it. 
def freshFunctions: Set[FunctionDecl] def freshMacros: Vector[MacroDecl] - def declareAndRecordAsFreshFunctions(functions: Set[FunctionDecl]): Unit - def declareAndRecordAsFreshMacros(functions: Vector[MacroDecl]): Unit + def declareAndRecordAsFreshFunctions(functions: Set[FunctionDecl], toStack: Boolean): Unit + def declareAndRecordAsFreshMacros(functions: Seq[MacroDecl], toStack: Boolean): Unit + def pushSymbolStack(): Unit + def popSymbolStack(): (Set[FunctionDecl], Seq[MacroDecl]) def setPcs(other: PathConditionStack): Unit def statistics(): Map[String, String] @@ -96,8 +100,11 @@ trait DefaultDeciderProvider extends VerifierComponent { this: Verifier => private var _prover: Prover = _ private var pathConditions: PathConditionStack = _ - private var _freshFunctions: Set[FunctionDecl] = _ /* [BRANCH-PARALLELISATION] */ - private var _freshMacros: Vector[MacroDecl] = _ + private var _declaredFreshFunctions: Set[FunctionDecl] = _ /* [BRANCH-PARALLELISATION] */ + private var _declaredFreshMacros: Vector[MacroDecl] = _ + + private var _freshFunctionStack: Stack[mutable.HashSet[FunctionDecl]] = _ + private var _freshMacroStack: Stack[mutable.ListBuffer[MacroDecl]] = _ def prover: Prover = _prover @@ -162,16 +169,20 @@ trait DefaultDeciderProvider extends VerifierComponent { this: Verifier => def start(): Unit = { pathConditions = new LayeredPathConditionStack() - _freshFunctions = if (Verifier.config.parallelizeBranches()) HashSet.empty else InsertionOrderedSet.empty /* [BRANCH-PARALLELISATION] */ - _freshMacros = Vector.empty + _declaredFreshFunctions = if (Verifier.config.parallelizeBranches()) HashSet.empty else InsertionOrderedSet.empty /* [BRANCH-PARALLELISATION] */ + _declaredFreshMacros = Vector.empty + _freshMacroStack = Stack.empty + _freshFunctionStack = Stack.empty createProver() } def reset(): Unit = { _prover.reset() pathConditions = new LayeredPathConditionStack() - _freshFunctions = if (Verifier.config.parallelizeBranches()) HashSet.empty else 
InsertionOrderedSet.empty /* [BRANCH-PARALLELISATION] */ - _freshMacros = Vector.empty + _declaredFreshFunctions = if (Verifier.config.parallelizeBranches()) HashSet.empty else InsertionOrderedSet.empty /* [BRANCH-PARALLELISATION] */ + _declaredFreshMacros = Vector.empty + _freshMacroStack = Stack.empty + _freshFunctionStack = Stack.empty } def stop(): Unit = { @@ -209,23 +220,30 @@ trait DefaultDeciderProvider extends VerifierComponent { this: Verifier => assume(InsertionOrderedSet(Seq(t)), false) } + override def assumeDefinition(t: Term): Unit = + assume(InsertionOrderedSet(Seq(t)), false, true) + def assume(terms: Iterable[Term]): Unit = assume(InsertionOrderedSet(terms), false) - def assume(terms: InsertionOrderedSet[Term], enforceAssumption: Boolean = false): Unit = { + def assume(terms: InsertionOrderedSet[Term], enforceAssumption: Boolean = false, isDefinition: Boolean = false): Unit = { val filteredTerms = if (enforceAssumption) terms else terms filterNot isKnownToBeTrue - if (filteredTerms.nonEmpty) assumeWithoutSmokeChecks(filteredTerms) + if (filteredTerms.nonEmpty) assumeWithoutSmokeChecks(filteredTerms, isDefinition) } - private def assumeWithoutSmokeChecks(terms: InsertionOrderedSet[Term]) = { + private def assumeWithoutSmokeChecks(terms: InsertionOrderedSet[Term], isDefinition: Boolean = false) = { val assumeRecord = new DeciderAssumeRecord(terms) val sepIdentifier = symbExLog.openScope(assumeRecord) /* Add terms to Silicon-managed path conditions */ - terms foreach pathConditions.add + if (isDefinition) { + terms foreach pathConditions.addDefinition + } else { + terms foreach pathConditions.add + } /* Add terms to the prover's assumptions */ terms foreach prover.assume @@ -320,7 +338,8 @@ trait DefaultDeciderProvider extends VerifierComponent { this: Verifier => prover.declare(macroDecl) - _freshMacros = _freshMacros :+ macroDecl /* [BRANCH-PARALLELISATION] */ + _declaredFreshMacros = _declaredFreshMacros :+ macroDecl /* [BRANCH-PARALLELISATION] */ 
+ _freshMacroStack.foreach(l => l.append(macroDecl)) macroDecl } @@ -356,26 +375,61 @@ trait DefaultDeciderProvider extends VerifierComponent { this: Verifier => HeapDepFun(proverFun.id, proverFun.argSorts, proverFun.resultSort).asInstanceOf[F] } - _freshFunctions = _freshFunctions + FunctionDecl(fun) /* [BRANCH-PARALLELISATION] */ + val decl = FunctionDecl(fun) + _declaredFreshFunctions = _declaredFreshFunctions + decl /* [BRANCH-PARALLELISATION] */ + _freshFunctionStack.foreach(s => s.add(decl)) fun } /* [BRANCH-PARALLELISATION] */ - def freshFunctions: Set[FunctionDecl] = _freshFunctions - def freshMacros: Vector[MacroDecl] = _freshMacros + def freshFunctions: Set[FunctionDecl] = _declaredFreshFunctions + def freshMacros: Vector[MacroDecl] = _declaredFreshMacros + + def declareAndRecordAsFreshFunctions(functions: Set[FunctionDecl], toSymbolStack: Boolean): Unit = { + if (!toSymbolStack) { + functions foreach prover.declare + + _declaredFreshFunctions = _declaredFreshFunctions ++ functions + } else { + for (f <- functions) { + if (!_declaredFreshFunctions.contains(f)) + prover.declare(f) + + _declaredFreshFunctions = _declaredFreshFunctions + f + _freshFunctionStack.foreach(s => s.add(f)) + } + } + } + + def declareAndRecordAsFreshMacros(macros: Seq[MacroDecl], toStack: Boolean): Unit = { + if (!toStack) { + macros foreach prover.declare - def declareAndRecordAsFreshFunctions(functions: Set[FunctionDecl]): Unit = { - functions foreach prover.declare + _declaredFreshMacros = _declaredFreshMacros ++ macros + } else { + for (m <- macros) { + if (!_declaredFreshMacros.contains(m)) + prover.declare(m) - _freshFunctions = _freshFunctions ++ functions + _declaredFreshMacros = _declaredFreshMacros.appended(m) + _freshMacroStack.foreach(l => l.append(m)) + } + } } - def declareAndRecordAsFreshMacros(macros: Vector[MacroDecl]): Unit = { - macros foreach prover.declare + def pushSymbolStack(): Unit = { + _freshFunctionStack = _freshFunctionStack.prepended(mutable.HashSet()) 
+ _freshMacroStack = _freshMacroStack.prepended(mutable.ListBuffer()) + } - _freshMacros = _freshMacros ++ macros + def popSymbolStack(): (Set[FunctionDecl], Seq[MacroDecl]) = { + val funcDecls = _freshFunctionStack.head.toSet + _freshFunctionStack = _freshFunctionStack.tail + val macroDecls = _freshMacroStack.head.toSeq + _freshMacroStack = _freshMacroStack.tail + (funcDecls, macroDecls) } /* Misc */ diff --git a/src/main/scala/decider/PathConditions.scala b/src/main/scala/decider/PathConditions.scala index 8f53ac589..4d4829735 100644 --- a/src/main/scala/decider/PathConditions.scala +++ b/src/main/scala/decider/PathConditions.scala @@ -24,8 +24,11 @@ trait RecordedPathConditions { def branchConditions: Stack[Term] def branchConditionExps: Stack[Option[ast.Exp]] def assumptions: InsertionOrderedSet[Term] + def definingAssumptions: InsertionOrderedSet[Term] def declarations: InsertionOrderedSet[Decl] + def definitionsOnly: RecordedPathConditions + def contains(assumption: Term): Boolean def conditionalized: Seq[Term] @@ -42,6 +45,7 @@ trait RecordedPathConditions { trait PathConditionStack extends RecordedPathConditions { def setCurrentBranchCondition(condition: Term, conditionExp: Option[ast.Exp]): Unit def add(assumption: Term): Unit + def addDefinition(assumption: Term): Unit def add(declaration: Decl): Unit def pushScope(): Unit def popScope(): Unit @@ -64,17 +68,31 @@ private class PathConditionStackLayer private var _branchConditionExp: Option[Option[ast.Exp]] = None private var _globalAssumptions: InsertionOrderedSet[Term] = InsertionOrderedSet.empty private var _nonGlobalAssumptions: InsertionOrderedSet[Term] = InsertionOrderedSet.empty + private var _globalDefiningAssumptions: InsertionOrderedSet[Term] = InsertionOrderedSet.empty + private var _nonGlobalDefiningAssumptions: InsertionOrderedSet[Term] = InsertionOrderedSet.empty private var _declarations: InsertionOrderedSet[Decl] = InsertionOrderedSet.empty def branchCondition: Option[Term] = 
_branchCondition def branchConditionExp: Option[Option[ast.Exp]] = _branchConditionExp def globalAssumptions: InsertionOrderedSet[Term] = _globalAssumptions + def globalDefiningAssumptions: InsertionOrderedSet[Term] = _globalDefiningAssumptions + def nonGlobalDefiningAssumptions: InsertionOrderedSet[Term] = _nonGlobalDefiningAssumptions def nonGlobalAssumptions: InsertionOrderedSet[Term] = _nonGlobalAssumptions def declarations: InsertionOrderedSet[Decl] = _declarations def assumptions: InsertionOrderedSet[Term] = globalAssumptions ++ nonGlobalAssumptions def pathConditions: InsertionOrderedSet[Term] = assumptions ++ branchCondition + def definitionsOnly(): PathConditionStackLayer = { + val result = new PathConditionStackLayer + result._globalAssumptions = _globalDefiningAssumptions + result._globalDefiningAssumptions = _globalDefiningAssumptions + result._nonGlobalAssumptions = _nonGlobalDefiningAssumptions + result._nonGlobalDefiningAssumptions = _nonGlobalDefiningAssumptions + result._declarations = _declarations + result + } + def branchCondition_=(condition: Term): Unit = { assert(_branchCondition.isEmpty, s"Branch condition is already set (to ${_branchCondition.get}), " @@ -104,6 +122,20 @@ private class PathConditionStackLayer _nonGlobalAssumptions += assumption } + def addDefinition(assumption: Term): Unit = { + assert( + !assumption.isInstanceOf[And], + s"Unexpectedly found a conjunction (should have been split): $assumption") + + if (PathConditions.isGlobal(assumption)) { + _globalAssumptions += assumption + _globalDefiningAssumptions += assumption + } else { + _nonGlobalAssumptions += assumption + _nonGlobalDefiningAssumptions += assumption + } + } + def add(declaration: Decl): Unit = _declarations += declaration def contains(pathCondition: Term): Boolean = { @@ -134,6 +166,9 @@ private trait LayeredPathConditionStackLike { protected def assumptions(layers: Stack[PathConditionStackLayer]): InsertionOrderedSet[Term] = 
InsertionOrderedSet(layers.flatMap(_.assumptions)) // Note: Performance? + protected def definingAssumptions(layers: Stack[PathConditionStackLayer]): InsertionOrderedSet[Term] = + InsertionOrderedSet(layers.flatMap(_.globalDefiningAssumptions) ++ layers.flatMap(_.nonGlobalDefiningAssumptions)) // Note: Performance? + protected def declarations(layers: Stack[PathConditionStackLayer]): InsertionOrderedSet[Decl] = InsertionOrderedSet(layers.flatMap(_.declarations)) // Note: Performance? @@ -176,13 +211,24 @@ private trait LayeredPathConditionStackLike { val ignores = ignore.topLevelConjuncts for (layer <- layers) { - globals ++= layer.globalAssumptions + val actualBranchCondition = layer.branchCondition.getOrElse(True) + val relevantNonGlobals = layer.nonGlobalAssumptions -- ignores + val (trueNonGlobals, additionalGlobals) = if (!actualBranchCondition.existsDefined{ case t if qvars.contains(t) => }) { + // The branch condition is independent of all quantified variables + // Any assumptions that are also independent of all quantified variables can be treated as global assumptions. 
+ val (trueNonGlobals, unconditionalGlobals) = relevantNonGlobals.partition(a => a.existsDefined{ case t if qvars.contains(t) => }) + (trueNonGlobals, unconditionalGlobals.map(Implies(actualBranchCondition, _))) + } else { + (relevantNonGlobals, Seq()) + } + + globals ++= layer.globalAssumptions ++ additionalGlobals nonGlobals :+= Quantification( quantifier, qvars, - Implies(layer.branchCondition.getOrElse(True), And(layer.nonGlobalAssumptions -- ignores)), + Implies(actualBranchCondition, And(trueNonGlobals)), triggers, name, isGlobal) @@ -199,12 +245,17 @@ private class DefaultRecordedPathConditions(from: Stack[PathConditionStackLayer] val branchConditions: Stack[Term] = branchConditions(from) val branchConditionExps: Stack[Option[ast.Exp]] = branchConditionExps(from) val assumptions: InsertionOrderedSet[Term] = assumptions(from) + val definingAssumptions: InsertionOrderedSet[Term] = definingAssumptions(from) val declarations: InsertionOrderedSet[Decl] = declarations(from) def contains(assumption: Term): Boolean = contains(from, assumption) val conditionalized: Seq[Term] = conditionalized(from) + def definitionsOnly(): RecordedPathConditions = { + new DefaultRecordedPathConditions(from.map(_.definitionsOnly)) + } + def quantified(quantifier: Quantifier, qvars: Seq[Var], triggers: Seq[Trigger], @@ -248,6 +299,15 @@ private[decider] class LayeredPathConditionStack allAssumptions ++= tlcs } + def addDefinition(assumption: Term): Unit = { + /* TODO: Would be cleaner to not add assumptions that are already set as branch conditions */ + + val tlcs = assumption.topLevelConjuncts + + tlcs foreach layers.head.addDefinition + allAssumptions ++= tlcs + } + def add(declaration: Decl): Unit = { layers.head.add(declaration) } @@ -311,6 +371,9 @@ private[decider] class LayeredPathConditionStack def declarations: InsertionOrderedSet[Decl] = InsertionOrderedSet(layers.flatMap(_.declarations)) // Note: Performance? 
+ def definingAssumptions: InsertionOrderedSet[Term] = + InsertionOrderedSet(layers.flatMap(_.globalDefiningAssumptions) ++ layers.flatMap(_.nonGlobalDefiningAssumptions)) // Note: Performance? + def contains(assumption: Term): Boolean = allAssumptions.contains(assumption) def conditionalized: Seq[Term] = conditionalized(layers) @@ -357,6 +420,14 @@ private[decider] class LayeredPathConditionStack clonedStack } + override def definitionsOnly: RecordedPathConditions = { + val result = duplicate() + result.layers = layers map (_.definitionsOnly()) + result.allAssumptions = InsertionOrderedSet(layers.flatMap(_.globalDefiningAssumptions) ++ + layers.flatMap(_.nonGlobalDefiningAssumptions)) + result + } + override def toString: String = { val sb = new StringBuilder(s"${this.getClass.getSimpleName}:\n") val sep = s" ${"-" * 10}\n" diff --git a/src/main/scala/rules/Brancher.scala b/src/main/scala/rules/Brancher.scala index 625ca4918..f482a2eae 100644 --- a/src/main/scala/rules/Brancher.scala +++ b/src/main/scala/rules/Brancher.scala @@ -64,7 +64,7 @@ object brancher extends BranchingRules { || skipPathFeasibilityCheck || !v.decider.check(condition, Verifier.config.checkTimeout())) - val parallelizeElseBranch = s.parallelizeBranches && !s.underJoin && executeThenBranch && executeElseBranch + val parallelizeElseBranch = s.parallelizeBranches && executeThenBranch && executeElseBranch // val additionalPaths = // if (executeThenBranch && exploreFalseBranch) 1 @@ -85,7 +85,11 @@ object brancher extends BranchingRules { val uidBranchPoint = v.symbExLog.insertBranchPoint(2, Some(condition), conditionExp) var functionsOfCurrentDecider: Set[FunctionDecl] = null var macrosOfCurrentDecider: Vector[MacroDecl] = null + var wasElseExecutedOnDifferentVerifier = false + var functionsOfElseBranchDecider: Set[FunctionDecl] = null + var macrosOfElseBranchDecider: Seq[MacroDecl] = null var pcsForElseBranch: PathConditionStack = null + var noOfErrors = 0 val elseBranchVerificationTask: 
Verifier => VerificationResult = if (executeElseBranch) { @@ -101,6 +105,7 @@ object brancher extends BranchingRules { functionsOfCurrentDecider = v.decider.freshFunctions macrosOfCurrentDecider = v.decider.freshMacros pcsForElseBranch = v.decider.pcs.duplicate() + noOfErrors = v.errorsReportedSoFar.get() } (v0: Verifier) => { @@ -109,18 +114,22 @@ object brancher extends BranchingRules { if (v.uniqueId != v0.uniqueId){ /* [BRANCH-PARALLELISATION] */ // executing the else branch on a different verifier, need to adapt the state + wasElseExecutedOnDifferentVerifier = true + if (s.underJoin) + v0.decider.pushSymbolStack() val newFunctions = functionsOfCurrentDecider -- v0.decider.freshFunctions val newMacros = macrosOfCurrentDecider.diff(v0.decider.freshMacros) v0.decider.prover.comment(s"[Shifting execution from ${v.uniqueId} to ${v0.uniqueId}]") v0.decider.prover.comment(s"Bulk-declaring functions") - v0.decider.declareAndRecordAsFreshFunctions(newFunctions) + v0.decider.declareAndRecordAsFreshFunctions(newFunctions, false) v0.decider.prover.comment(s"Bulk-declaring macros") - v0.decider.declareAndRecordAsFreshMacros(newMacros) + v0.decider.declareAndRecordAsFreshMacros(newMacros, false) v0.decider.prover.comment(s"Taking path conditions from source verifier ${v.uniqueId}") v0.decider.setPcs(pcsForElseBranch) + v0.errorsReportedSoFar.set(noOfErrors) } elseBranchVerifier = v0.uniqueId @@ -131,7 +140,13 @@ object brancher extends BranchingRules { if (v.uniqueId != v0.uniqueId) v1.decider.prover.saturate(Verifier.config.proverSaturationTimeouts.afterContract) - fElse(v1.stateConsolidator.consolidateIfRetrying(s1, v1), v1) + val result = fElse(v1.stateConsolidator.consolidateIfRetrying(s1, v1), v1) + if (wasElseExecutedOnDifferentVerifier && s.underJoin) { + val newSymbols = v1.decider.popSymbolStack() + functionsOfElseBranchDecider = newSymbols._1 + macrosOfElseBranchDecider = newSymbols._2 + } + result }) } } else { @@ -179,6 +194,7 @@ object brancher extends 
BranchingRules { try { if (parallelizeElseBranch) { val pcsAfterThenBranch = v.decider.pcs.duplicate() + val noOfErrorsAfterThenBranch = v.errorsReportedSoFar.get() val pcsBefore = v.decider.pcs @@ -188,6 +204,7 @@ object brancher extends BranchingRules { // we have done other work during the join, need to reset v.decider.prover.comment(s"Resetting path conditions after interruption") v.decider.setPcs(pcsAfterThenBranch) + v.errorsReportedSoFar.set(noOfErrorsAfterThenBranch) v.decider.prover.saturate(Verifier.config.proverSaturationTimeouts.afterContract) } }else{ @@ -209,6 +226,14 @@ object brancher extends BranchingRules { }, alwaysWaitForOther = parallelizeElseBranch) v.symbExLog.endBranchPoint(uidBranchPoint) + if (wasElseExecutedOnDifferentVerifier && s.underJoin) { + + v.decider.prover.comment(s"[To continue after join, adding else branch functions and macros to current verifier.]") + v.decider.prover.comment(s"Bulk-declaring functions") + v.decider.declareAndRecordAsFreshFunctions(functionsOfElseBranchDecider, true) + v.decider.prover.comment(s"Bulk-declaring macros") + v.decider.declareAndRecordAsFreshMacros(macrosOfElseBranchDecider, true) + } res } } diff --git a/src/main/scala/rules/ChunkSupporter.scala b/src/main/scala/rules/ChunkSupporter.scala index fe88e765b..fd438a99f 100644 --- a/src/main/scala/rules/ChunkSupporter.scala +++ b/src/main/scala/rules/ChunkSupporter.scala @@ -108,8 +108,7 @@ object chunkSupporter extends ChunkSupportRules { Q(s1, h, optCh.flatMap(ch => Some(ch.snap)), v1)) } else { executionFlowController.tryOrFail2[Heap, Option[Term]](s.copy(h = h), v)((s1, v1, QS) => - // 2022-05-07 MHS: MoreCompleteExhale isn't yet integrated into function verification, hence the limitation to method verification - if (s.isMethodVerification && s1.moreCompleteExhale) { + if (s1.moreCompleteExhale) { moreCompleteExhaleSupporter.consumeComplete(s1, s1.h, resource, args, perms, ve, v1)((s2, h2, snap2, v2) => { QS(s2.copy(h = s.h), h2, snap2, v2) }) @@ 
-206,7 +205,7 @@ object chunkSupporter extends ChunkSupportRules { executionFlowController.tryOrFail2[Heap, Term](s.copy(h = h), v)((s1, v1, QS) => { val lookupFunction = - if (s.isMethodVerification && s1.moreCompleteExhale) moreCompleteExhaleSupporter.lookupComplete _ + if (s1.moreCompleteExhale) moreCompleteExhaleSupporter.lookupComplete _ else lookupGreedy _ lookupFunction(s1, s1.h, resource, args, ve, v1)((s2, tSnap, v2) => QS(s2.copy(h = s.h), s2.h, tSnap, v2)) @@ -223,8 +222,8 @@ object chunkSupporter extends ChunkSupportRules { : VerificationResult = { val id = ChunkIdentifier(resource, s.program) - - findChunk[NonQuantifiedChunk](h.values, id, args, v) match { + val findRes = findChunk[NonQuantifiedChunk](h.values, id, args, v) + findRes match { case Some(ch) if v.decider.check(IsPositive(ch.perm), Verifier.config.checkTimeout()) => Q(s, ch.snap, v) case _ if v.decider.checkSmoke(true) => diff --git a/src/main/scala/rules/Consumer.scala b/src/main/scala/rules/Consumer.scala index d6f6be1b4..b10d19468 100644 --- a/src/main/scala/rules/Consumer.scala +++ b/src/main/scala/rules/Consumer.scala @@ -183,6 +183,11 @@ object consumer extends ConsumptionRules { v.logger.debug("hR = " + s.reserveHeaps.map(v.stateFormatter.format).mkString("", ",\n ", "")) val consumed = a match { + case imp @ ast.Implies(e0, a0) if !a.isPure && s.moreJoins => + val impliesRecord = new ImpliesRecord(imp, s, v.decider.pcs, "consume") + val uidImplies = v.symbExLog.openScope(impliesRecord) + consumeConditionalTlcMoreJoins(s, h, e0, a0, None, uidImplies, pve, v)(Q) + case imp @ ast.Implies(e0, a0) if !a.isPure => val impliesRecord = new ImpliesRecord(imp, s, v.decider.pcs, "consume") val uidImplies = v.symbExLog.openScope(impliesRecord) @@ -198,6 +203,11 @@ object consumer extends ConsumptionRules { Q(s2, h, Unit, v2) })) + case ite @ ast.CondExp(e0, a1, a2) if !a.isPure && s.moreJoins => + val condExpRecord = new CondExpRecord(ite, s, v.decider.pcs, "consume") + val uidCondExp = 
v.symbExLog.openScope(condExpRecord) + consumeConditionalTlcMoreJoins(s, h, e0, a1, Some(a2), uidCondExp, pve, v)(Q) + case ite @ ast.CondExp(e0, a1, a2) if !a.isPure => val condExpRecord = new CondExpRecord(ite, s, v.decider.pcs, "consume") val uidCondExp = v.symbExLog.openScope(condExpRecord) @@ -453,6 +463,53 @@ object consumer extends ConsumptionRules { consumed } + private def consumeConditionalTlcMoreJoins(s: State, h: Heap, e0: ast.Exp, a1: ast.Exp, a2: Option[ast.Exp], scopeUid: Int, + pve: PartialVerificationError, v: Verifier) + (Q: (State, Heap, Term, Verifier) => VerificationResult) + : VerificationResult = { + eval(s, e0, pve, v)((s1, t0, v1) => + joiner.join[(Heap, Term), (Heap, Term)](s1, v1, resetState = false)((s1, v1, QB) => { + branch(s1.copy(parallelizeBranches = false), t0, Some(e0), v1)( + (s2, v2) => + consumeR(s2.copy(parallelizeBranches = s1.parallelizeBranches), h, a1, pve, v2)((s3, h1, t1, v3) => { + v3.symbExLog.closeScope(scopeUid) + QB(s3, (h1, t1), v3) + }), + (s2, v2) => + a2 match { + case Some(a2) => consumeR(s2.copy(parallelizeBranches = s1.parallelizeBranches), h, a2, pve, v2)((s3, h1, t1, v3) => { + v3.symbExLog.closeScope(scopeUid) + QB(s3, (h1, t1), v3) + }) + case None => + v2.symbExLog.closeScope(scopeUid) + QB(s2.copy(parallelizeBranches = s1.parallelizeBranches), (h, Unit), v2) + }) + })(entries => { + val s2 = entries match { + case Seq(entry) => // One branch is dead + (entry.s, entry.data) + case Seq(entry1, entry2) => // Both branches are alive + val mergedData = ( + State.mergeHeap( + entry1.data._1, And(entry1.pathConditions.branchConditions), + entry2.data._1, And(entry2.pathConditions.branchConditions), + ), + // Assume that entry1.pcs is the inverse of entry2.pcs + Ite(And(entry1.pathConditions.branchConditions), entry1.data._2, entry2.data._2) + ) + (entry1.pathConditionAwareMerge(entry2, v1), mergedData) + case _ => + sys.error(s"Unexpected join data entries: $entries") + } + s2 + })((s4, data, v4) => { + Q(s4, 
data._1, data._2, v4) + }) + ) + } + + private def evalAndAssert(s: State, e: ast.Exp, pve: PartialVerificationError, v: Verifier) (Q: (State, Term, Verifier) => VerificationResult) : VerificationResult = { diff --git a/src/main/scala/rules/Evaluator.scala b/src/main/scala/rules/Evaluator.scala index 132cc129a..1ec45759a 100644 --- a/src/main/scala/rules/Evaluator.scala +++ b/src/main/scala/rules/Evaluator.scala @@ -171,7 +171,7 @@ object evaluator extends EvaluationRules { case _: ast.WildcardPerm => val (tVar, tConstraints) = v.decider.freshARP() - v.decider.assume(tConstraints) + v.decider.assumeDefinition(tConstraints) /* TODO: Only record wildcards in State.constrainableARPs that are used in exhale * position. Currently, wildcards used in inhale position (only) may not be removed * from State.constrainableARPs (potentially inefficient, but should be sound). @@ -292,7 +292,7 @@ object evaluator extends EvaluationRules { case ast.Let(x, e0, e1) => eval(s, e0, pve, v)((s1, t0, v1) => { val t = v1.decider.appliedFresh("letvar", v1.symbolConverter.toSort(x.typ), s1.relevantQuantifiedVariables) - v1.decider.assume(t === t0) + v1.decider.assumeDefinition(t === t0) val newFuncRec = s1.functionRecorder.recordFreshSnapshot(t.applicable.asInstanceOf[Function]) eval(s1.copy(g = s1.g + (x.localVar, t0), functionRecorder = newFuncRec), e1, pve, v1)(Q) }) @@ -330,9 +330,9 @@ object evaluator extends EvaluationRules { val uidCondExp = v.symbExLog.openScope(condExpRecord) eval(s, e0, pve, v)((s1, t0, v1) => joiner.join[Term, Term](s1, v1)((s2, v2, QB) => - brancher.branch(s2, t0, Some(e0), v2)( - (s3, v3) => eval(s3, e1, pve, v3)(QB), - (s3, v3) => eval(s3, e2, pve, v3)(QB)) + brancher.branch(s2.copy(parallelizeBranches = false), t0, Some(e0), v2)( + (s3, v3) => eval(s3.copy(parallelizeBranches = s2.parallelizeBranches), e1, pve, v3)(QB), + (s3, v3) => eval(s3.copy(parallelizeBranches = s2.parallelizeBranches), e2, pve, v3)(QB)) )(entries => { /* TODO: If branch(...) 
took orElse-continuations that are executed if a branch is dead, then then comparisons with t0/Not(t0) wouldn't be necessary. */ @@ -762,7 +762,8 @@ object evaluator extends EvaluationRules { * of the recorded snapshots - which is wrong (probably only * incomplete). */ - smDomainNeeded = true) + smDomainNeeded = true, + moreJoins = false) consumes(s3, pres, _ => pvePre, v2)((s4, snap, v3) => { val snap1 = snap.convert(sorts.Snap) val preFApp = App(functionSupporter.preconditionVersion(v3.symbolConverter.toFunction(func)), snap1 :: tArgs) @@ -776,7 +777,8 @@ object evaluator extends EvaluationRules { recordVisited = s2.recordVisited, functionRecorder = fr5, smDomainNeeded = s2.smDomainNeeded, - hackIssue387DisablePermissionConsumption = s.hackIssue387DisablePermissionConsumption) + hackIssue387DisablePermissionConsumption = s.hackIssue387DisablePermissionConsumption, + moreJoins = s2.moreJoins) QB(s5, tFApp, v3)}) /* TODO: The join-function is heap-independent, and it is not obvious how a * joined snapshot could be defined and represented @@ -1091,9 +1093,9 @@ object evaluator extends EvaluationRules { : VerificationResult = { joiner.join[Term, Term](s, v)((s1, v1, QB) => - brancher.branch(s1, tLhs, eLhs, v1, fromShortCircuitingAnd)( - (s2, v2) => eval(s2, eRhs, pve, v2)(QB), - (s2, v2) => QB(s2, True, v2)) + brancher.branch(s1.copy(parallelizeBranches = false), tLhs, eLhs, v1, fromShortCircuitingAnd = fromShortCircuitingAnd)( + (s2, v2) => eval(s2.copy(parallelizeBranches = s1.parallelizeBranches), eRhs, pve, v2)(QB), + (s2, v2) => QB(s2.copy(parallelizeBranches = s1.parallelizeBranches), True, v2)) )(entries => { assert(entries.length <= 2) val s1 = entries.tail.foldLeft(entries.head.s)((sAcc, entry) => sAcc.merge(entry.s)) @@ -1595,9 +1597,9 @@ object evaluator extends EvaluationRules { case `stop` => Q(s1, t0, v1) // Done, if last expression was true/false for or/and (optimisation) case _ => joiner.join[Term, Term](s1, v1)((s2, v2, QB) => - brancher.branch(s2, 
t0, Some(viper.silicon.utils.ast.BigAnd(exps)), v2, true) _ tupled swapIfAnd( - (s3, v3) => QB(s3, constructor(Seq(t0)), v3), - (s3, v3) => evalSeqShortCircuit(constructor, s3, exps.tail, pve, v3)(QB)) + brancher.branch(s2.copy(parallelizeBranches = false), t0, Some(viper.silicon.utils.ast.BigAnd(exps)), v2, fromShortCircuitingAnd = true) _ tupled swapIfAnd( + (s3, v3) => QB(s3.copy(parallelizeBranches = s2.parallelizeBranches), constructor(Seq(t0)), v3), + (s3, v3) => evalSeqShortCircuit(constructor, s3.copy(parallelizeBranches = s2.parallelizeBranches), exps.tail, pve, v3)(QB)) ){case Seq(ent) => (ent.s, ent.data) case Seq(ent1, ent2) => diff --git a/src/main/scala/rules/Executor.scala b/src/main/scala/rules/Executor.scala index a1824929f..1f43e3b6c 100644 --- a/src/main/scala/rules/Executor.scala +++ b/src/main/scala/rules/Executor.scala @@ -22,7 +22,7 @@ import viper.silicon.state.terms._ import viper.silicon.state.terms.predef.`?r` import viper.silicon.utils.freshSnap import viper.silicon.verifier.Verifier -import viper.silver.cfg.ConditionalEdge +import viper.silver.cfg.{ConditionalEdge, StatementBlock} trait ExecutionRules extends SymbolicExecutionRules { def exec(s: State, @@ -45,33 +45,41 @@ object executor extends ExecutionRules { import evaluator._ import producer._ - private def follow(s: State, edge: SilverEdge, v: Verifier) + private def follow(s: State, edge: SilverEdge, v: Verifier, joinPoint: Option[SilverBlock]) (Q: (State, Verifier) => VerificationResult) : VerificationResult = { - edge match { - case ce: cfg.ConditionalEdge[ast.Stmt, ast.Exp] => - val condEdgeRecord = new ConditionalEdgeRecord(ce.condition, s, v.decider.pcs) - val sepIdentifier = v.symbExLog.openScope(condEdgeRecord) - val s1 = handleOutEdge(s, edge, v) - eval(s1, ce.condition, IfFailed(ce.condition), v)((s2, tCond, v1) => - /* Using branch(...) here ensures that the edge condition is recorded - * as a branch condition on the pathcondition stack. 
- */ - brancher.branch(s2, tCond, Some(ce.condition), v1)( - (s3, v3) => - exec(s3, ce.target, ce.kind, v3)((s4, v4) => { - v4.symbExLog.closeScope(sepIdentifier) - Q(s4, v4) - }), - (_, v3) => { - v3.symbExLog.closeScope(sepIdentifier) - Success() - })) - case ue: cfg.UnconditionalEdge[ast.Stmt, ast.Exp] => + joinPoint match { + case Some(jp) if jp == edge.target => + // Join point reached, stop following edges. val s1 = handleOutEdge(s, edge, v) - exec(s1, ue.target, ue.kind, v)(Q) + Q(s1, v) + + case _ => edge match { + case ce: cfg.ConditionalEdge[ast.Stmt, ast.Exp] => + val condEdgeRecord = new ConditionalEdgeRecord(ce.condition, s, v.decider.pcs) + val sepIdentifier = v.symbExLog.openScope(condEdgeRecord) + val s1 = handleOutEdge(s, edge, v) + eval(s1, ce.condition, IfFailed(ce.condition), v)((s2, tCond, v1) => + /* Using branch(...) here ensures that the edge condition is recorded + * as a branch condition on the pathcondition stack. + */ + brancher.branch(s2.copy(parallelizeBranches = false), tCond, Some(ce.condition), v1)( + (s3, v3) => + exec(s3.copy(parallelizeBranches = s2.parallelizeBranches), ce.target, ce.kind, v3, joinPoint)((s4, v4) => { + v4.symbExLog.closeScope(sepIdentifier) + Q(s4, v4) + }), + (_, v3) => { + v3.symbExLog.closeScope(sepIdentifier) + Success() + })) + + case ue: cfg.UnconditionalEdge[ast.Stmt, ast.Exp] => + val s1 = handleOutEdge(s, edge, v) + exec(s1, ue.target, ue.kind, v, joinPoint)(Q) + } } } @@ -91,51 +99,95 @@ object executor extends ExecutionRules { private def follows(s: State, edges: Seq[SilverEdge], @unused pvef: ast.Exp => PartialVerificationError, - v: Verifier) + v: Verifier, + joinPoint: Option[SilverBlock]) (Q: (State, Verifier) => VerificationResult) : VerificationResult = { - if (edges.isEmpty) { - Q(s, v) - } else if (edges.length == 1) { - follow(s, edges.head, v)(Q) - } else { - edges match { - case Seq(thenEdge@ConditionalEdge(cond1, _, _, _), elseEdge@ConditionalEdge(cond2, _, _, _)) - if 
Verifier.config.parallelizeBranches() && cond2 == ast.Not(cond1)() => - val condEdgeRecord = new ConditionalEdgeRecord(thenEdge.condition, s, v.decider.pcs) - val sepIdentifier = v.symbExLog.openScope(condEdgeRecord) - val res = eval(s, thenEdge.condition, IfFailed(thenEdge.condition), v)((s2, tCond, v1) => - brancher.branch(s2, tCond, Some(thenEdge.condition), v1)( - (s3, v3) => { - val s3p = handleOutEdge(s3, thenEdge, v3) - exec(s3p, thenEdge.target, thenEdge.kind, v3)((s4, v4) => { - v4.symbExLog.closeScope(sepIdentifier) - val branchRes = Q(s4, v4) - branchRes - }) - }, - (s3, v3) => { - val s3p = handleOutEdge(s3, elseEdge, v3) - exec(s3p, elseEdge.target, elseEdge.kind, v3)((s4, v4) => { - v4.symbExLog.closeScope(sepIdentifier) - Q(s4, v4) - }) - })) - res - case _ => - val uidBranchPoint = v.symbExLog.insertBranchPoint(edges.length) - val res = edges.zipWithIndex.foldLeft(Success(): VerificationResult) { - case (result: VerificationResult, (edge, edgeIndex)) => - if (edgeIndex != 0) { - v.symbExLog.switchToNextBranch(uidBranchPoint) - } - v.symbExLog.markReachable(uidBranchPoint) - result combine follow(s, edge, v)(Q) + // Find join point if it exists. + val jp: Option[SilverBlock] = edges.headOption.flatMap(edge => s.methodCfg.joinPoints.get(edge.source)) + + (edges, jp) match { + case (Seq(), _) => Q(s, v) + case (Seq(edge), _) => follow(s, edge, v, joinPoint)(Q) + case (Seq(edge1, edge2), Some(newJoinPoint)) if + s.moreJoins && + // Can't directly match type because of type erasure ... + edge1.isInstanceOf[ConditionalEdge[ast.Stmt, ast.Exp]] && + edge2.isInstanceOf[ConditionalEdge[ast.Stmt, ast.Exp]] && + // We only join branches that originate from if statements + // this is the case if the source is a statement block, + // as opposed to a loop head block. 
+ edge1.source.isInstanceOf[StatementBlock[ast.Stmt, ast.Exp]] && + edge2.source.isInstanceOf[StatementBlock[ast.Stmt, ast.Exp]] => + + assert(edge1.source == edge2.source) + + val cedge1 = edge1.asInstanceOf[ConditionalEdge[ast.Stmt, ast.Exp]] + val cedge2 = edge2.asInstanceOf[ConditionalEdge[ast.Stmt, ast.Exp]] + + // Here we assume that edge1.condition is the negation of edge2.condition. + assert((cedge1.condition, cedge2.condition) match { + case (exp1, ast.Not(exp2)) => exp1 == exp2 + case (ast.Not(exp1), exp2) => exp1 == exp2 + case _ => false + }) + + eval(s, cedge1.condition, pvef(cedge1.condition), v)((s1, t0, v1) => + // The type arguments here are Null because there is no need to pass any join data. + joiner.join[scala.Null, scala.Null](s1, v1, resetState = false)((s2, v2, QB) => { + brancher.branch(s2, t0, Some(cedge1.condition), v2)( + // Follow only until join point. + (s3, v3) => follow(s3, edge1, v3, Some(newJoinPoint))((s, v) => QB(s, null, v)), + (s3, v3) => follow(s3, edge2, v3, Some(newJoinPoint))((s, v) => QB(s, null, v)) + ) + })(entries => { + val s2 = entries match { + case Seq(entry) => // One branch is dead + entry.s + case Seq(entry1, entry2) => // Both branches are alive + entry1.pathConditionAwareMerge(entry2, v1) + case _ => + sys.error(s"Unexpected join data entries: $entries") + } + (s2, null) + })((s4, _, v4) => { + if (joinPoint.contains(newJoinPoint)) { + Q(s4, v4) + } else { + // Continue after merging at join point. 
+ exec(s4, newJoinPoint, s4.methodCfg.inEdges(newJoinPoint).head.kind, v4, joinPoint)(Q) + } + }) + ) + + case (Seq(thenEdge@ConditionalEdge(cond1, _, _, _), elseEdge@ConditionalEdge(cond2, _, _, _)), _) + if Verifier.config.parallelizeBranches() && cond2 == ast.Not(cond1)() => + val condEdgeRecord = new ConditionalEdgeRecord(thenEdge.condition, s, v.decider.pcs) + val sepIdentifier = v.symbExLog.openScope(condEdgeRecord) + val res = eval(s, thenEdge.condition, IfFailed(thenEdge.condition), v)((s2, tCond, v1) => + brancher.branch(s2, tCond, Some(thenEdge.condition), v1)( + (s3, v3) => { + follow(s3, thenEdge, v3, joinPoint)(Q) + }, + (s3, v3) => { + follow(s3, elseEdge, v3, joinPoint)(Q) + })) + res + + case _ => + val uidBranchPoint = v.symbExLog.insertBranchPoint(edges.length) + val res = edges.zipWithIndex.foldLeft(Success(): VerificationResult) { + case (result: VerificationResult, (edge, edgeIndex)) => { + if (edgeIndex != 0) { + v.symbExLog.switchToNextBranch(uidBranchPoint) + } + v.symbExLog.markReachable(uidBranchPoint) + result combine follow(s, edge, v, joinPoint)(Q) } - v.symbExLog.endBranchPoint(uidBranchPoint) - res - } + } + v.symbExLog.endBranchPoint(uidBranchPoint) + res } } @@ -143,17 +195,17 @@ object executor extends ExecutionRules { (Q: (State, Verifier) => VerificationResult) : VerificationResult = { - exec(s, graph.entry, cfg.Kind.Normal, v)(Q) + exec(s, graph.entry, cfg.Kind.Normal, v, None)(Q) } - def exec(s: State, block: SilverBlock, incomingEdgeKind: cfg.Kind.Value, v: Verifier) + def exec(s: State, block: SilverBlock, incomingEdgeKind: cfg.Kind.Value, v: Verifier, joinPoint: Option[SilverBlock]) (Q: (State, Verifier) => VerificationResult) : VerificationResult = { block match { case cfg.StatementBlock(stmt) => execs(s, stmt, v)((s1, v1) => - follows(s1, magicWandSupporter.getOutEdges(s1, block), IfFailed, v1)(Q)) + follows(s1, magicWandSupporter.getOutEdges(s1, block), IfFailed, v1, joinPoint)(Q)) case _: cfg.PreconditionBlock[ast.Stmt, 
ast.Exp] | _: cfg.PostconditionBlock[ast.Stmt, ast.Exp] => @@ -208,7 +260,7 @@ object executor extends ExecutionRules { intermediateResult combine executionFlowController.locally(s1, v1)((s2, v2) => { eval(s2, eCond, WhileFailed(eCond), v2)((_, _, _) => Success())})}})}) - && executionFlowController.locally(s, v)((s0, v0) => { + combine executionFlowController.locally(s, v)((s0, v0) => { v0.decider.prover.comment("Loop head block: Establish invariant") consumes(s0, invs, LoopInvariantNotEstablished, v0)((sLeftover, _, v1) => { v1.decider.prover.comment("Loop head block: Execute statements of loop head block (in invariant state)") @@ -217,7 +269,7 @@ object executor extends ExecutionRules { case (intermediateResult, (s1, pcs, ff1)) => /* [BRANCH-PARALLELISATION] ff1 */ val s2 = s1.copy(invariantContexts = sLeftover.h +: s1.invariantContexts) intermediateResult combine executionFlowController.locally(s2, v1)((s3, v2) => { - v2.decider.declareAndRecordAsFreshFunctions(ff1 -- v2.decider.freshFunctions) /* [BRANCH-PARALLELISATION] */ + v2.decider.declareAndRecordAsFreshFunctions(ff1 -- v2.decider.freshFunctions, true) /* [BRANCH-PARALLELISATION] */ v2.decider.assume(pcs.assumptions) v2.decider.prover.saturate(Verifier.config.proverSaturationTimeouts.afterContract) if (v2.decider.checkSmoke()) @@ -225,7 +277,7 @@ object executor extends ExecutionRules { else { execs(s3, stmts, v2)((s4, v3) => { v3.decider.prover.comment("Loop head block: Follow loop-internal edges") - follows(s4, sortedEdges, WhileFailed, v3)(Q)})}})}})})) + follows(s4, sortedEdges, WhileFailed, v3, joinPoint)(Q)})}})}})})) case _ => /* We've reached a loop head block via an edge other than an in-edge: a normal edge or @@ -339,7 +391,7 @@ object executor extends ExecutionRules { val h3 = Heap(remainingChunks ++ otherChunks) val (sm, smValueDef) = quantifiedChunkSupporter.singletonSnapshotMap(s3, field, Seq(tRcvr), tRhs, v2) v1.decider.prover.comment("Definitional axioms for singleton-FVF's value") - 
v1.decider.assume(smValueDef) + v1.decider.assumeDefinition(smValueDef) val ch = quantifiedChunkSupporter.createSingletonQuantifiedChunk(Seq(`?r`), field, Seq(tRcvr), FullPerm, sm, s.program) if (s3.heapDependentTriggers.contains(field)) v1.decider.assume(FieldTrigger(field.name, sm, tRcvr)) @@ -374,7 +426,7 @@ object executor extends ExecutionRules { if (s.qpFields.contains(field)) { val (sm, smValueDef) = quantifiedChunkSupporter.singletonSnapshotMap(s, field, Seq(tRcvr), snap, v) v.decider.prover.comment("Definitional axioms for singleton-FVF's value") - v.decider.assume(smValueDef) + v.decider.assumeDefinition(smValueDef) quantifiedChunkSupporter.createSingletonQuantifiedChunk(Seq(`?r`), field, Seq(tRcvr), p, sm, s.program) } else { BasicChunk(FieldID, BasicChunkIdentifier(field.name), Seq(tRcvr), snap, p) @@ -627,7 +679,7 @@ object executor extends ExecutionRules { * reported by Silicon issue #328. */ val t = v.decider.fresh(name, v.symbolConverter.toSort(typ)) - v.decider.assume(t === rhs) + v.decider.assumeDefinition(t === rhs) t } diff --git a/src/main/scala/rules/Joiner.scala b/src/main/scala/rules/Joiner.scala index 7e7a56354..04ac754e7 100644 --- a/src/main/scala/rules/Joiner.scala +++ b/src/main/scala/rules/Joiner.scala @@ -13,10 +13,18 @@ import viper.silicon.state.State import viper.silicon.state.terms.{And, Or, Term} import viper.silicon.verifier.Verifier -case class JoinDataEntry[D](s: State, data: D, pathConditions: RecordedPathConditions) +case class JoinDataEntry[D](s: State, data: D, pathConditions: RecordedPathConditions) { + // Instead of merging states by calling State.merge, + // we can directly merge JoinDataEntries to obtain new States, + // and the join data entries themselves provide information about the path conditions to State.merge. 
+ def pathConditionAwareMerge(other: JoinDataEntry[D], v: Verifier): State = { + val res = State.merge(this.s, this.pathConditions, other.s, other.pathConditions) + v.stateConsolidator.consolidate(res, v) + } +} trait JoiningRules extends SymbolicExecutionRules { - def join[D, JD](s: State, v: Verifier) + def join[D, JD](s: State, v: Verifier, resetState: Boolean = true) (block: (State, Verifier, (State, D, Verifier) => VerificationResult) => VerificationResult) (merge: Seq[JoinDataEntry[D]] => (State, JD)) (Q: (State, JD, Verifier) => VerificationResult) @@ -24,7 +32,7 @@ trait JoiningRules extends SymbolicExecutionRules { } object joiner extends JoiningRules { - def join[D, JD](s: State, v: Verifier) + def join[D, JD](s: State, v: Verifier, resetState: Boolean = true) (block: (State, Verifier, (State, D, Verifier) => VerificationResult) => VerificationResult) (merge: Seq[JoinDataEntry[D]] => (State, JD)) (Q: (State, JD, Verifier) => VerificationResult) @@ -40,16 +48,27 @@ object joiner extends JoiningRules { val s2 = s1.copy(underJoin = true) block(s2, v1, (s3, data, v2) => { - /* In order to prevent mismatches between different final states of the evaluation - * paths that are to be joined, we reset certain state properties that may have been - * affected by the evaluation - such as the store (by let-bindings) or the heap (by - * state consolidations) to their initial values. - */ - val s4 = s3.copy(g = s1.g, - h = s1.h, - oldHeaps = s1.oldHeaps, - underJoin = s1.underJoin, - retrying = s1.retrying) + val s4 = + if (resetState) { + /* In order to prevent mismatches between different final states of the evaluation + * paths that are to be joined, we reset certain state properties that may have been + * affected by the evaluation - such as the store (by let-bindings) or the heap (by + * state consolidations) to their initial values. 
+ */ + s3.copy(g = s1.g, + h = s1.h, + oldHeaps = s1.oldHeaps, + underJoin = s1.underJoin, + // TODO: Evaluation should not affect partiallyConsumedHeap, probably + ssCache = s1.ssCache, + partiallyConsumedHeap = s1.partiallyConsumedHeap, + invariantContexts = s1.invariantContexts, + retrying = s1.retrying) + } else { + // For more joins, state shouldn't be reset. + s3 + } + entries :+= JoinDataEntry(s4, data, v2.decider.pcs.after(preMark)) Success() }) diff --git a/src/main/scala/rules/MagicWandSupporter.scala b/src/main/scala/rules/MagicWandSupporter.scala index 68ff99ff4..b648f97c6 100644 --- a/src/main/scala/rules/MagicWandSupporter.scala +++ b/src/main/scala/rules/MagicWandSupporter.scala @@ -261,27 +261,18 @@ object magicWandSupporter extends SymbolicExecutionRules { var conservedPcs: Vector[RecordedPathConditions] = Vector.empty var conservedPcsStack: Stack[Vector[RecordedPathConditions]] = s5.conservedPcs - // Do not record further path conditions if the current state is inconsistent. - // This is an ad-hoc workaround to mitigate the following problem: producing a wand's LHS - // and executing the packaging proof code can introduce definitional path conditions, e.g. + // Producing a wand's LHS and executing the packaging proof code can introduce definitional path conditions, e.g. // new permission and snapshot maps, which are in general necessary to proceed after the // package statement, e.g. to know which permissions have been consumed. - // Since the current implementation doesn't properly differentiate between definitional - // and arbitrary path conditions, all path conditions are recorded — which is unsound. - // To somewhat improve the situation, such that "non-malevolent" usage of wands works - // as expected, we simply check if the current state is known to be inconsistent, and if - // it is, we don't record (any further) path conditions. - // TODO: Fix this. Might require a substantial redesign of Silicon's path conditions, though. 
- - if (!v4.decider.checkSmoke()) { - conservedPcs = s5.conservedPcs.head :+ pcs - - conservedPcsStack = - s5.conservedPcs.tail match { - case empty @ Seq() => empty - case head +: tail => (head ++ conservedPcs) +: tail - } - } + // Here, we want to keep *only* the definitions, but no other path conditions. + + conservedPcs = s5.conservedPcs.head :+ pcs.definitionsOnly + + conservedPcsStack = + s5.conservedPcs.tail match { + case empty @ Seq() => empty + case head +: tail => (head ++ conservedPcs) +: tail + } val s6 = s5.copy(conservedPcs = conservedPcsStack, recordPcs = s.recordPcs) @@ -296,7 +287,7 @@ object magicWandSupporter extends SymbolicExecutionRules { val (sm, smValueDef) = quantifiedChunkSupporter.singletonSnapshotMap(s5, wand, args, MagicWandSnapshot(freshSnapRoot, snap), v4) v4.decider.prover.comment("Definitional axioms for singleton-SM's value") - v4.decider.assume(smValueDef) + v4.decider.assumeDefinition(smValueDef) val ch = quantifiedChunkSupporter.createSingletonQuantifiedChunk(formalVars, wand, args, FullPerm, sm, s.program) appendToResults(s5, ch, v4.decider.pcs.after(preMark), v4) Success() diff --git a/src/main/scala/rules/MoreCompleteExhaleSupporter.scala b/src/main/scala/rules/MoreCompleteExhaleSupporter.scala index 1ded4387e..0aa6464bd 100644 --- a/src/main/scala/rules/MoreCompleteExhaleSupporter.scala +++ b/src/main/scala/rules/MoreCompleteExhaleSupporter.scala @@ -114,7 +114,7 @@ object moreCompleteExhaleSupporter extends SymbolicExecutionRules { } val (s1, taggedSnap, snapDefs, permSum) = summariseOnly(s, relevantChunks, resource, args, v) - v.decider.assume(And(snapDefs)) + v.decider.assumeDefinition(And(snapDefs)) // v.decider.assume(PermAtMost(permSum, FullPerm())) /* Done in StateConsolidator instead */ val s2 = diff --git a/src/main/scala/rules/PredicateSupporter.scala b/src/main/scala/rules/PredicateSupporter.scala index da0661fe8..30d6ffc3b 100644 --- a/src/main/scala/rules/PredicateSupporter.scala +++ 
b/src/main/scala/rules/PredicateSupporter.scala @@ -72,7 +72,7 @@ object predicateSupporter extends PredicateSupportRules { val (sm, smValueDef) = quantifiedChunkSupporter.singletonSnapshotMap(s2, predicate, tArgs, predSnap, v1) v1.decider.prover.comment("Definitional axioms for singleton-SM's value") - v1.decider.assume(smValueDef) + v1.decider.assumeDefinition(smValueDef) val ch = quantifiedChunkSupporter.createSingletonQuantifiedChunk( formalArgs, predicate, tArgs, tPerm, sm, s.program) diff --git a/src/main/scala/rules/Producer.scala b/src/main/scala/rules/Producer.scala index 67acaf36f..205579e35 100644 --- a/src/main/scala/rules/Producer.scala +++ b/src/main/scala/rules/Producer.scala @@ -214,6 +214,39 @@ object producer extends ProductionRules { continuation(if (state.exhaleExt) state.copy(reserveHeaps = state.h +: state.reserveHeaps.drop(1)) else state, verifier) val produced = a match { + case imp @ ast.Implies(e0, a0) if !a.isPure && s.moreJoins => + val impliesRecord = new ImpliesRecord(imp, s, v.decider.pcs, "produce") + val uidImplies = v.symbExLog.openScope(impliesRecord) + + eval(s, e0, pve, v)((s1, t0, v1) => + // The type arguments here are Null because there is no need to pass any join data. + joiner.join[scala.Null, scala.Null](s1, v1, resetState = false)((s1, v1, QB) => + branch(s1.copy(parallelizeBranches = false), t0, Some(e0), v1)( + (s2, v2) => produceR(s2.copy(parallelizeBranches = s1.parallelizeBranches), sf, a0, pve, v2)((s3, v3) => { + v3.symbExLog.closeScope(uidImplies) + QB(s3, null, v3) + }), + (s2, v2) => { + v2.decider.assume(sf(sorts.Snap, v2) === Unit) + /* TODO: Avoid creating a fresh var (by invoking) `sf` that is not used + * otherwise. In order words, only make this assumption if `sf` has + * already been used, e.g. in a snapshot equality such as `s0 == (s1, s2)`. 
+ */ + v2.symbExLog.closeScope(uidImplies) + QB(s2.copy(parallelizeBranches = s1.parallelizeBranches), null, v2) + }) + )(entries => { + val s2 = entries match { + case Seq(entry) => // One branch is dead + entry.s + case Seq(entry1, entry2) => // Both branches are alive + entry1.pathConditionAwareMerge(entry2, v1) + case _ => + sys.error(s"Unexpected join data entries: $entries")} + (s2, null) + })((s, _, v) => Q(s, v)) + ) + case imp @ ast.Implies(e0, a0) if !a.isPure => val impliesRecord = new ImpliesRecord(imp, s, v.decider.pcs, "produce") val uidImplies = v.symbExLog.openScope(impliesRecord) @@ -234,6 +267,34 @@ object producer extends ProductionRules { Q(s2, v2) })) + case ite @ ast.CondExp(e0, a1, a2) if !a.isPure && s.moreJoins => + val condExpRecord = new CondExpRecord(ite, s, v.decider.pcs, "produce") + val uidCondExp = v.symbExLog.openScope(condExpRecord) + + eval(s, e0, pve, v)((s1, t0, v1) => + // The type arguments here are Null because there is no need to pass any join data. 
+ joiner.join[scala.Null, scala.Null](s1, v1, resetState = false)((s1, v1, QB) => + branch(s1.copy(parallelizeBranches = false), t0, Some(e0), v1)( + (s2, v2) => produceR(s2.copy(parallelizeBranches = s1.parallelizeBranches), sf, a1, pve, v2)((s3, v3) => { + v3.symbExLog.closeScope(uidCondExp) + QB(s3, null, v3) + }), + (s2, v2) => produceR(s2.copy(parallelizeBranches = s1.parallelizeBranches), sf, a2, pve, v2)((s3, v3) => { + v3.symbExLog.closeScope(uidCondExp) + QB(s3, null, v3) + })) + )(entries => { + val s2 = entries match { + case Seq(entry) => // One branch is dead + entry.s + case Seq(entry1, entry2) => // Both branches are alive + entry1.pathConditionAwareMerge(entry2, v1) + case _ => + sys.error(s"Unexpected join data entries: $entries")} + (s2, null) + })((s, _, v) => Q(s, v)) + ) + case ite @ ast.CondExp(e0, a1, a2) if !a.isPure => val condExpRecord = new CondExpRecord(ite, s, v.decider.pcs, "produce") val uidCondExp = v.symbExLog.openScope(condExpRecord) @@ -301,7 +362,7 @@ object producer extends ProductionRules { quantifiedChunkSupporter.singletonSnapshotMap(s1, wand, args, sf(sorts.Snap, v1), v1) v1.decider.prover.comment("Definitional axioms for singleton-SM's value") val definitionalAxiomMark = v1.decider.setPathConditionMark() - v1.decider.assume(smValueDef) + v1.decider.assumeDefinition(smValueDef) val conservedPcs = if (s1.recordPcs) (s1.conservedPcs.head :+ v1.decider.pcs.after(definitionalAxiomMark)) +: s1.conservedPcs.tail else s1.conservedPcs diff --git a/src/main/scala/rules/QuantifiedChunkSupport.scala b/src/main/scala/rules/QuantifiedChunkSupport.scala index 86c3b3165..3067949ea 100644 --- a/src/main/scala/rules/QuantifiedChunkSupport.scala +++ b/src/main/scala/rules/QuantifiedChunkSupport.scala @@ -967,7 +967,7 @@ object quantifiedChunkSupporter extends QuantifiedChunkSupport { val (sm, smValueDef) = quantifiedChunkSupporter.singletonSnapshotMap(s, resource, tArgs, tSnap, v) v.decider.prover.comment("Definitional axioms for 
singleton-SM's value") val definitionalAxiomMark = v.decider.setPathConditionMark() - v.decider.assume(smValueDef) + v.decider.assumeDefinition(smValueDef) val conservedPcs = if (s.recordPcs) (s.conservedPcs.head :+ v.decider.pcs.after(definitionalAxiomMark)) +: s.conservedPcs.tail else s.conservedPcs diff --git a/src/main/scala/state/State.scala b/src/main/scala/state/State.scala index 9dd3fab18..6634c70c1 100644 --- a/src/main/scala/state/State.scala +++ b/src/main/scala/state/State.scala @@ -11,8 +11,9 @@ import viper.silver.cfg.silver.SilverCfg import viper.silicon.common.Mergeable import viper.silicon.common.collections.immutable.InsertionOrderedSet import viper.silicon.decider.RecordedPathConditions +import viper.silicon.interfaces.state.{Chunk, GeneralChunk} import viper.silicon.state.State.OldHeaps -import viper.silicon.state.terms.{PermLiteral, Term, Var} +import viper.silicon.state.terms.{And, Ite, NoPerm, PermLiteral, SeqAppend, Term, Var} import viper.silicon.supporters.PredicateData import viper.silicon.supporters.functions.{FunctionData, FunctionRecorder, NoopFunctionRecorder} import viper.silicon.{Map, Stack} @@ -68,7 +69,8 @@ final case class State(g: Store = Store(), retryLevel: Int = 0, /* ast.Field, ast.Predicate, or MagicWandIdentifier */ heapDependentTriggers: InsertionOrderedSet[Any] = InsertionOrderedSet.empty, - moreCompleteExhale: Boolean = false) + moreCompleteExhale: Boolean = false, + moreJoins: Boolean = false) extends Mergeable[State] { val isMethodVerification: Boolean = { @@ -157,7 +159,7 @@ object State { ssCache1, hackIssue387DisablePermissionConsumption1, qpFields1, qpPredicates1, qpMagicWands1, smCache1, pmCache1, smDomainNeeded1, predicateSnapMap1, predicateFormalVarMap1, retryLevel, useHeapTriggers, - moreCompleteExhale) => + moreCompleteExhale, moreJoins) => /* Decompose state s2: most values must match those of s1 */ s2 match { @@ -182,7 +184,7 @@ object State { ssCache2, `hackIssue387DisablePermissionConsumption1`, 
`qpFields1`, `qpPredicates1`, `qpMagicWands1`, smCache2, pmCache2, `smDomainNeeded1`, `predicateSnapMap1`, `predicateFormalVarMap1`, `retryLevel`, `useHeapTriggers`, - moreCompleteExhale2) => + moreCompleteExhale2, `moreJoins`) => val functionRecorder3 = functionRecorder1.merge(functionRecorder2) val triggerExp3 = triggerExp1 && triggerExp2 @@ -220,6 +222,219 @@ object State { } } + // Lists all fields which do not match in two states. + private def generateStateMismatchErrorMessage(s1: State, s2: State): Nothing = { + val err = new StringBuilder() + for (ix <- 0 until s1.productArity) yield { + val e1 = s1.productElement(ix) + val e2 = s2.productElement(ix) + if (e1 != e2) { + err ++= s"\n\tField index ${s1.productElementName(ix)} not equal" + err ++= s"\n\t\t state1: $e1" + err ++= s"\n\t\t state2: $e2" + + } + } + sys.error(s"State merging failed: unexpected mismatch between symbolic states: $err") + } + + // Merges two maps based on fOnce, if entry only exists in one map, + // and fTwice if entry exists in both maps. + // Used to merge the Store. + private def mergeMaps[K, V, D](map1: Map[K, V], data1: D, map2: Map[K, V], data2: D) + (fOnce: (V, D) => Option[V]) + (fTwice: (V, D, V, D) => Option[V]) + : Map[K, V] = { + + map1.flatMap({ case (k, v1) => + (map2.get(k) match { + case Some(v2) => fTwice(v1, data1, v2, data2) + case None => fOnce(v1, data1) + }).map(v => (k, v)) + }) ++ map2.flatMap({ case (k, v2) => + (map1.get(k) match { + case Some(_) => None // Already considered in first case: Some(fTwice(v1, c1, v2, c2)) + case None => fOnce(v2, data2) + }).map(v => (k, v)) + }) + } + + // Puts a collection of chunks under a condition. + private def conditionalizeChunks(h: Iterable[Chunk], cond: Term): Iterable[Chunk] = { + h map (c => { + c match { + case c: GeneralChunk => + c.withPerm(Ite(cond, c.perm, NoPerm)) + case _ => sys.error("Chunk type not conditionalizable.") + } + }) + } + + // Puts a heap under a condition. 
+ private def conditionalizeHeap(h: Heap, cond: Term): Heap = { + Heap(conditionalizeChunks(h.values, cond)) + } + + // Merges two heaps together, by putting h1 under condition cond1, + // and h2 under cond2. + // Assumes that cond1 is the negation of cond2. + def mergeHeap(h1: Heap, cond1: Term, h2: Heap, cond2: Term): Heap = { + val (unconditionalHeapChunks, h1HeapChunksToConditionalize) = h1.values.partition(c1 => h2.values.exists(_ == c1)) + val h2HeapChunksToConditionalize = h2.values.filter(c2 => !unconditionalHeapChunks.exists(_ == c2)) + val h1ConditionalizedHeapChunks = conditionalizeChunks(h1HeapChunksToConditionalize, cond1) + val h2ConditionalizedHeapChunks = conditionalizeChunks(h2HeapChunksToConditionalize, cond2) + Heap(unconditionalHeapChunks) + Heap(h1ConditionalizedHeapChunks) + Heap(h2ConditionalizedHeapChunks) + } + + def merge(s1: State, pc1: RecordedPathConditions, s2: State, pc2: RecordedPathConditions): State = { + s1 match { + /* Decompose state s1 */ + case State(g1, h1, program, member, + predicateData, functionData, + oldHeaps1, + parallelizeBranches1, + recordVisited1, visited1, + methodCfg1, invariantContexts1, + constrainableARPs1, + quantifiedVariables1, + retrying1, + underJoin1, + functionRecorder1, + conservingSnapshotGeneration1, + recordPossibleTriggers1, possibleTriggers1, + triggerExp1, + partiallyConsumedHeap1, + permissionScalingFactor1, + reserveHeaps1, reserveCfgs1, conservedPcs1, recordPcs1, exhaleExt1, + ssCache1, hackIssue387DisablePermissionConsumption1, + qpFields1, qpPredicates1, qpMagicWands1, smCache1, pmCache1, smDomainNeeded1, + predicateSnapMap1, predicateFormalVarMap1, retryLevel, useHeapTriggers, + moreCompleteExhale, moreJoins) => + + /* Decompose state s2: most values must match those of s1 */ + s2 match { + case State(g2, h2, `program`, `member`, + `predicateData`, `functionData`, + oldHeaps2, + `parallelizeBranches1`, + `recordVisited1`, `visited1`, + `methodCfg1`, invariantContexts2, + constrainableARPs2, 
+ `quantifiedVariables1`, + `retrying1`, + `underJoin1`, + functionRecorder2, + `conservingSnapshotGeneration1`, + `recordPossibleTriggers1`, possibleTriggers2, + triggerExp2, + partiallyConsumedHeap2, + `permissionScalingFactor1`, + reserveHeaps2, `reserveCfgs1`, conservedPcs2, `recordPcs1`, `exhaleExt1`, + ssCache2, `hackIssue387DisablePermissionConsumption1`, + `qpFields1`, `qpPredicates1`, `qpMagicWands1`, smCache2, pmCache2, smDomainNeeded2, + `predicateSnapMap1`, `predicateFormalVarMap1`, `retryLevel`, `useHeapTriggers`, + moreCompleteExhale2, `moreJoins`) => + + val functionRecorder3 = functionRecorder1.merge(functionRecorder2) + val triggerExp3 = triggerExp1 && triggerExp2 + val possibleTriggers3 = possibleTriggers1 ++ possibleTriggers2 + val constrainableARPs3 = constrainableARPs1 ++ constrainableARPs2 + + val smDomainNeeded3 = smDomainNeeded1 || smDomainNeeded2 + + val conditions1 = And(pc1.branchConditions) + val conditions2 = And(pc2.branchConditions) + + val mergeStore = (g1: Store, g2: Store) => { + Store(mergeMaps(g1.values, conditions1, g2.values, conditions2) + ((_, _) => { + // If store entry is only on one branch, we can safely discard it. + None + }) + ((v1, cond1, v2, cond2) => { + if (v1 == v2) { + // Trivial: Both entries are the same. 
+ Some(v1) + } else { + assert(v1.sort == v2.sort) + Some(Ite(cond1, v1, v2)) + } + })) + } + + val g3 = mergeStore(g1, g2) + + val h3 = mergeHeap(h1, conditions1, h2, conditions2) + + val partiallyConsumedHeap3 = (partiallyConsumedHeap1, partiallyConsumedHeap2) match { + case (None, None) => None + case (Some(pch1), None) => Some(conditionalizeHeap(pch1, conditions1)) + case (None, Some(pch2)) => Some(conditionalizeHeap(pch2, conditions2)) + case (Some(pch1), Some(pch2)) => Some(mergeHeap( + pch1, conditions1, + pch2, conditions2 + )) + } + + val oldHeaps3 = Map.from(mergeMaps(oldHeaps1, conditions1, oldHeaps2, conditions2) + ((_, _) => { + None + }) + ((heap1, cond1, heap2, cond2) => { + Some(mergeHeap(heap1, cond1, heap2, cond2)) + })) + + assert(invariantContexts1.length == invariantContexts2.length) + val invariantContexts3 = invariantContexts1 + .zip(invariantContexts2) + .map({case (h1, h2) => mergeHeap(h1, conditions1, h2, conditions2)}) + + assert(reserveHeaps1.length == reserveHeaps2.length) + val reserveHeaps3 = reserveHeaps1 + .zip(reserveHeaps2) + .map({case (h1, h2) => mergeHeap(h1, conditions1, h2, conditions2)}) + + + assert(conservedPcs1.length == conservedPcs2.length) + val conservedPcs3 = conservedPcs1 + .zip(conservedPcs1) + .map({case (pcs1, pcs2) => (pcs1 ++ pcs2).distinct}) + + val ssCache3 = ssCache1 ++ ssCache2 + val smCache3 = smCache1.union(smCache2) + val pmCache3 = pmCache1 ++ pmCache2 + + val s3 = s1.copy(functionRecorder = functionRecorder3, + possibleTriggers = possibleTriggers3, + triggerExp = triggerExp3, + constrainableARPs = constrainableARPs3, + ssCache = ssCache3, + smCache = smCache3, + pmCache = pmCache3, + g = g3, + h = h3, + oldHeaps = oldHeaps3, + partiallyConsumedHeap = partiallyConsumedHeap3, + smDomainNeeded = smDomainNeeded3, + invariantContexts = invariantContexts3, + reserveHeaps = reserveHeaps3, + conservedPcs = conservedPcs3) + + s3 + + // Optionally, we could also do a state consolidation after each + // state 
merging, but this has shown to decrease performance a bit. + //val s4 = verifier.stateConsolidator.consolidate(s3, verifier) + //s4 + + case _ => { + println("Error at new merge function:") + generateStateMismatchErrorMessage(s1, s2) + } + } + } + } + def preserveAfterLocalEvaluation(pre: State, post: State): State = { pre.copy(functionRecorder = post.functionRecorder, possibleTriggers = post.possibleTriggers, diff --git a/src/main/scala/verifier/DefaultMainVerifier.scala b/src/main/scala/verifier/DefaultMainVerifier.scala index 4e3809788..c2d41d192 100644 --- a/src/main/scala/verifier/DefaultMainVerifier.scala +++ b/src/main/scala/verifier/DefaultMainVerifier.scala @@ -317,6 +317,7 @@ class DefaultMainVerifier(config: Config, } case _ => Verifier.config.exhaleMode == ExhaleMode.MoreComplete } + val moreJoins = Verifier.config.moreJoins() && member.isInstanceOf[ast.Method] State(program = program, functionData = functionData, @@ -328,7 +329,8 @@ class DefaultMainVerifier(config: Config, predicateFormalVarMap = predSnapGenerator.formalVarMap, currentMember = Some(member), heapDependentTriggers = resourceTriggers, - moreCompleteExhale = mce) + moreCompleteExhale = mce, + moreJoins = moreJoins) } private def createInitialState(@unused cfg: SilverCfg, @@ -349,7 +351,8 @@ class DefaultMainVerifier(config: Config, qpMagicWands = quantifiedMagicWands, predicateSnapMap = predSnapGenerator.snapMap, predicateFormalVarMap = predSnapGenerator.formalVarMap, - moreCompleteExhale = Verifier.config.exhaleMode == ExhaleMode.MoreComplete) + moreCompleteExhale = Verifier.config.exhaleMode == ExhaleMode.MoreComplete, + moreJoins = Verifier.config.moreJoins()) } private def excludeMethod(method: ast.Method) = ( diff --git a/src/test/resources/issue387/jonas_viktor.vpr b/src/test/resources/issue387/jonas_viktor.vpr new file mode 100644 index 000000000..2859a762a --- /dev/null +++ b/src/test/resources/issue387/jonas_viktor.vpr @@ -0,0 +1,71 @@ +// Any copyright is dedicated to the Public 
Domain. +// http://creativecommons.org/publicdomain/zero/1.0/ + + +// first version of the example + +domain ArrayDomain {} + +field val_int: Int +field val_ref: Ref + +function idx_into(a: ArrayDomain, a_len: Int): Int +function array_len(a: ArrayDomain): Int +function to_domain(self: Ref): ArrayDomain + requires acc(Array(self), read$()) + +function read$(): Perm + ensures none < result + ensures result < write + +predicate Array(self: Ref) +predicate usize(self: Ref) { + acc(self.val_int, write) +} + +method foo() { + var a: Ref + var a_len: Int + var _3: Ref + var unknown: Ref + var i: Ref + inhale acc(a.val_ref, write) && acc(Array(a.val_ref), write) + + inhale acc(_3.val_ref, write) && acc(usize(unknown), read$()) // <- removing this makes it pass + + exhale acc(a.val_ref, read$()) + + inhale acc(usize(i), write) && acc(a.val_ref, read$()) + + inhale (unfolding acc(usize(i), write) in + (forall q: Int :: { idx_into(to_domain(a.val_ref), q) } + !(q < i.val_int) || + idx_into(to_domain(a.val_ref), q) <= + idx_into(to_domain(a.val_ref), i.val_int))) + //:: UnexpectedOutput(assert.failed:assertion.false, /silicon/issue/387/) + assert (unfolding acc(usize(i), write) in + (forall q: Int :: { idx_into(to_domain(a.val_ref), q) } + !(q < i.val_int) || + idx_into(to_domain(a.val_ref), q) <= + idx_into(to_domain(a.val_ref), i.val_int))) +} + + +// second version of the example + +function holds(a: Ref, b: Int): Bool + +method foo2() { + var a: Ref + var _3: Ref + + inhale acc(a.val_ref, write) + + inhale acc(_3.val_ref, write) // <- removing this makes it pass + + exhale acc(a.val_ref, 1 / 2) + inhale acc(a.val_ref, 1 / 2) + + inhale forall q: Int :: holds(a.val_ref, q) + assert forall q: Int :: holds(a.val_ref, q) +} \ No newline at end of file diff --git a/src/test/resources/moreCompleteExhale/disjunctiveAliasing.vpr b/src/test/resources/moreCompleteExhale/disjunctiveAliasing.vpr new file mode 100644 index 000000000..a95bb5b45 --- /dev/null +++ 
b/src/test/resources/moreCompleteExhale/disjunctiveAliasing.vpr @@ -0,0 +1,20 @@ +// Any copyright is dedicated to the Public Domain. +// http://creativecommons.org/publicdomain/zero/1.0/ + +field f: Int + +function fn(x: Ref, y: Ref, z: Ref): Int + requires acc(x.f) && acc(y.f) + requires (z == x) || (z == y) +{ + z.f +} + +method m(x: Ref, y: Ref, z: Ref) + requires acc(x.f) && acc(y.f) + requires (z == x) || (z == y) +{ + var tmp: Int + tmp := z.f +} + diff --git a/src/test/scala/SiliconTests.scala b/src/test/scala/SiliconTests.scala index 027bd3da2..1a8669c43 100644 --- a/src/test/scala/SiliconTests.scala +++ b/src/test/scala/SiliconTests.scala @@ -15,7 +15,7 @@ import viper.silver.verifier.Verifier class SiliconTests extends SilSuite { private val siliconTestDirectories = - Seq("consistency", "issue387") + Seq("consistency") private val silTestDirectories = Seq("all", diff --git a/src/test/scala/SiliconTestsMoreCompleteExhale.scala b/src/test/scala/SiliconTestsMoreCompleteExhale.scala index 0266bae68..910a66f92 100644 --- a/src/test/scala/SiliconTestsMoreCompleteExhale.scala +++ b/src/test/scala/SiliconTestsMoreCompleteExhale.scala @@ -7,7 +7,7 @@ package viper.silicon.tests class SiliconTestsMoreCompleteExhale extends SiliconTests { - override val testDirectories: Seq[String] = Seq("moreCompleteExhale") + override val testDirectories: Seq[String] = Seq("moreCompleteExhale", "issue387") override val commandLineArguments: Seq[String] = Seq( "--timeout", "300" /* seconds */,