Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Interactive mode #914

Closed
wants to merge 10 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 2 additions & 1 deletion book/src/guide/run.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ rustc-flux --crate-type=lib path/to/test.rs

### Refinement Annotations on a File

When running flux on a file with `rustc-flux path/to/test.rs`, refinement annotations should be prefixed with `flux::`.

For example, the refinement below will only work when running `rustc-flux` which is intended for use on a single file.

Expand Down Expand Up @@ -149,6 +149,7 @@ You can set various `env` variables to customize the behavior of `flux`.
* `FLUX_LOG_DIR=path/to/log/` sets the directory where constraints, timing and cache are saved. Defaults to `./log/`.
* `FLUX_DUMP_CONSTRAINT=1` tell `flux` to dump constraints generated for each function.
* `FLUX_DUMP_CHECKER_TRACE=1` saves the checker's trace (useful for debugging!)
* `FLUX_CHECK_DIFF=1` only checks code in source files that were modified since the last check
* `FLUX_DUMP_TIMINGS=1` saves the profile information
* `FLUX_DUMP_MIR=1` saves the low-level MIR for each analyzed function
* `FLUX_POINTER_WIDTH=N` the size of pointers (either `32` or `64`), used to determine if an integer cast is lossy (default `64`).
Expand Down
2 changes: 1 addition & 1 deletion crates/flux-common/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@ edition.workspace = true

[dependencies]
flux-config.workspace = true

rustc-hash.workspace = true
serde.workspace = true

[package.metadata.rust-analyzer]
rustc_private = true
Expand Down
126 changes: 118 additions & 8 deletions crates/flux-common/src/cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,19 @@ use std::{fs::File, path::PathBuf};

use flux_config as config;
use rustc_hash::FxHashMap;
use rustc_hir::def_id::LocalDefId;
use rustc_middle::ty::TyCtxt;
use rustc_span::FileName;

#[derive(serde::Serialize, serde::Deserialize)]
pub struct QueryCache {
entries: FxHashMap<String, u64>,
/// map from [`DefId`] (as string) to hash of the constraint for that [`DefId`]
entries: FxHashMap<String, Option<u64>>,
/// map from [`DefId`] (as string) to modification time (at *previous* execution of flux)
old_timestamps: FxHashMap<String, std::time::SystemTime>,
/// map from [`DefId`] (as string) to modification time (at *current* execution of flux);
/// the `DefId` is checked if its `new_timestap` is newer than its `old_timestamp`.
new_timestamps: FxHashMap<String, std::time::SystemTime>,
}

impl Default for QueryCache {
Expand All @@ -15,15 +25,89 @@ impl Default for QueryCache {

impl QueryCache {
/// Creates an empty cache with no recorded queries or timestamps.
pub fn new() -> Self {
    QueryCache {
        entries: FxHashMap::default(),
        old_timestamps: FxHashMap::default(),
        new_timestamps: FxHashMap::default(),
    }
}

pub fn insert(&mut self, key: String, constr_hash: u64) {
pub fn insert_safe_query(
&mut self,
tcx: &TyCtxt,
def_id: LocalDefId,
constr_hash: Option<u64>,
) {
let key = tcx.def_path_str(def_id);
self.entries.insert(key, constr_hash);
}

pub fn is_safe(&self, key: &String, constr_hash: u64) -> bool {
config::is_cache_enabled() && self.entries.get(key).map_or(false, |h| *h == constr_hash)
pub fn is_safe_query(&self, tcx: &TyCtxt, def_id: LocalDefId, constr_hash: u64) -> bool {
let key = tcx.def_path_str(def_id);
config::is_cache_enabled()
&& self
.entries
.get(&key)
.map_or(false, |h| *h == Some(constr_hash))
}

/// Returns `true` if `def_id` has an entry in the cache, i.e. it was marked
/// safe in a previous run (regardless of the stored constraint hash).
pub fn is_safe_def_id(&self, tcx: &TyCtxt, def_id: LocalDefId) -> bool {
    self.entries.contains_key(&tcx.def_path_str(def_id))
}

/// Returns `Some((key, timestamp))` if the path exists,
/// with `key` as a string representation of the `path`
/// and `timestamp` as the most recent known modification time for that path.
/// Results are memoized in `new_timestamps`, so the filesystem is queried at
/// most once per path per run.
fn get_path_timestamp(
    &mut self,
    path: &std::path::Path,
) -> Option<(String, std::time::SystemTime)> {
    let key = path.to_string_lossy().to_string();
    // Fast path: already looked this file up in the current run.
    if let Some(&cached) = self.new_timestamps.get(&key) {
        return Some((key, cached));
    }
    // Slow path: ask the filesystem; bail out with `None` if the file is
    // missing or its mtime is unavailable.
    let modified = std::fs::metadata(path).and_then(|m| m.modified()).ok()?;
    self.new_timestamps.insert(key.clone(), modified);
    Some((key, modified))
}

/// Looks up the modification timestamp of the source file containing `def_id`.
/// Returns `None` when the definition does not come from a real local file.
fn get_def_id_timestamp(
    &mut self,
    tcx: &TyCtxt,
    def_id: LocalDefId,
) -> Option<(String, std::time::SystemTime)> {
    let source_map = tcx.sess.source_map();
    match source_map.span_to_filename(tcx.def_span(def_id)) {
        FileName::Real(real_name) => {
            let path = real_name.local_path()?;
            self.get_path_timestamp(path)
        }
        _ => None,
    }
}

/// Returns `true` if the key is absent, the timestamp is newer, or the
/// `def_id`'s constraint was NOT marked safe.
/// Queries not marked safe (i.e. absent from `entries`) are considered
/// "modified" so the checker re-runs and regenerates their error messages.
pub fn is_modified(&mut self, tcx: &TyCtxt, def_id: LocalDefId) -> bool {
    let Some((key, timestamp)) = self.get_def_id_timestamp(tcx, def_id) else {
        // No timestamp available (e.g. not a real local file): always re-check.
        return true;
    };
    let unchanged = self.old_timestamps.get(&key) == Some(&timestamp);
    let was_safe = self.entries.contains_key(&tcx.def_path_str(def_id));
    !(unchanged && was_safe)
}

fn path() -> Result<PathBuf, std::io::Error> {
Expand All @@ -44,15 +128,41 @@ impl QueryCache {
/// Serializes the cache (entries plus merged timestamps) as JSON to the cache
/// file. Propagates failures instead of panicking: the function already
/// returns `io::Result`, so `File::create` and `serde_json::to_writer` errors
/// are forwarded with `?` (`serde_json::Error` converts into `io::Error`).
pub fn save(&self) -> Result<(), std::io::Error> {
    let path = Self::path()?;
    let mut file = File::create(path)?;
    serde_json::to_writer(&mut file, &self.to_jsonable_tuple())?;
    Ok(())
}

// FIXME: silly hack to get around serde::Serialize shenanigans
fn to_jsonable_tuple(
    &self,
) -> (FxHashMap<String, Option<u64>>, FxHashMap<String, std::time::SystemTime>) {
    // Overlay the new timestamps onto the old ones so as to *preserve* the old
    // timestamps for files that were *not checked* in this current round for
    // whatever `rustc` reason (e.g. they were cached, there were other errors,
    // etc.). We save these too, to avoid rechecking those files in subsequent
    // runs (unless, of course, those files are edited by then).
    let mut timestamps = self.old_timestamps.clone();
    timestamps.extend(self.new_timestamps.iter().map(|(k, v)| (k.clone(), *v)));
    (self.entries.clone(), timestamps)
}

// FIXME: silly hack to get around serde::Deserialize shenanigans
fn from_jsonable_tuple(
    tuple: (FxHashMap<String, Option<u64>>, FxHashMap<String, std::time::SystemTime>),
) -> Self {
    // The tuple holds the persisted entries and the previous run's timestamps;
    // the current run starts with no new timestamps recorded.
    let (entries, old_timestamps) = tuple;
    QueryCache { entries, old_timestamps, new_timestamps: FxHashMap::default() }
}

pub fn load() -> Self {
if let Ok(path) = Self::path() {
if let Ok(file) = File::open(path) {
if let Ok(entries) = serde_json::from_reader(file) {
return QueryCache { entries };
if let Ok(data) = serde_json::from_reader(file) {
return Self::from_jsonable_tuple(data);
}
}
}
Expand Down
8 changes: 5 additions & 3 deletions crates/flux-common/src/dbg.rs
Original file line number Diff line number Diff line change
Expand Up @@ -71,9 +71,11 @@ pub use crate::_basic_block_start as basic_block_start;
#[macro_export]
macro_rules! _statement{
    ($pos:literal, $stmt:expr, $rcx:expr, $env:expr, $span:expr) => {{
        // Building the JSON traces is only useful (and only paid for) when the
        // checker-trace dump is enabled.
        if config::dump_checker_trace() {
            let rcx_json = RefineCtxtTrace::new($rcx);
            let env_json = TypeEnvTrace::new($env);
            tracing::info!(event = concat!("statement_", $pos), stmt = ?$stmt, stmt_span = ?$span, rcx = ?$rcx, env = ?$env, rcx_json = ?rcx_json, env_json = ?env_json)
        }
    }};
}
pub use crate::_statement as statement;
Expand Down
10 changes: 8 additions & 2 deletions crates/flux-config/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,10 @@ pub fn check_def() -> &'static str {
&CONFIG.check_def
}

/// Whether incremental ("diff") checking is enabled: only check code in source
/// files that were modified since the last check (set via `FLUX_CHECK_DIFF=1`;
/// defaults to `false`).
pub fn check_diff() -> bool {
CONFIG.check_diff
}

/// Whether to save profile/timing information (set via `FLUX_DUMP_TIMINGS=1`).
pub fn dump_timings() -> bool {
CONFIG.dump_timings
}
Expand Down Expand Up @@ -44,7 +48,7 @@ pub fn log_dir() -> &'static PathBuf {
}

/// Whether the query cache should be used. Diff-based checking relies on the
/// cache to compare against the previous run, so `check_diff` implicitly
/// enables caching even if `cache` itself is off.
pub fn is_cache_enabled() -> bool {
    CONFIG.cache || check_diff()
}

pub fn cache_path() -> PathBuf {
Expand Down Expand Up @@ -81,6 +85,7 @@ struct Config {
catch_bugs: bool,
pointer_width: PointerWidth,
check_def: String,
check_diff: bool,
cache: bool,
cache_file: String,
check_overflow: bool,
Expand Down Expand Up @@ -133,7 +138,8 @@ static CONFIG: LazyLock<Config> = LazyLock::new(|| {
.set_default("cache", false)?
.set_default("cache_file", "cache.json")?
.set_default("check_overflow", false)?
.set_default("scrape_quals", false)?;
.set_default("scrape_quals", false)?
.set_default("check_diff", false)?;
// Config comes first, environment settings override it.
if let Some(config_path) = CONFIG_PATH.as_ref() {
config_builder = config_builder.add_source(File::from(config_path.clone()));
Expand Down
21 changes: 15 additions & 6 deletions crates/flux-driver/src/callbacks.rs
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ fn check_crate(genv: GlobalEnv) -> Result<(), ErrorGuaranteed> {
.definitions()
.try_for_each_exhaust(|def_id| ck.check_def_catching_bugs(def_id));

ck.cache.save().unwrap_or(());
ck.query_cache.save().unwrap_or(());

tracing::info!("Callbacks::check_crate");

Expand Down Expand Up @@ -134,7 +134,7 @@ fn encode_and_save_metadata(genv: GlobalEnv) {

struct CrateChecker<'genv, 'tcx> {
    genv: GlobalEnv<'genv, 'tcx>,
    /// Cache of previously-safe query results and file timestamps, used for
    /// incremental (diff) checking; saved back to disk after the crate check.
    query_cache: QueryCache,
    checker_config: CheckerConfig,
}

Expand All @@ -145,14 +145,20 @@ impl<'genv, 'tcx> CrateChecker<'genv, 'tcx> {
check_overflow: crate_config.check_overflow,
scrape_quals: crate_config.scrape_quals,
};
CrateChecker { genv, cache: QueryCache::load(), checker_config }
CrateChecker { genv, query_cache: QueryCache::load(), checker_config }
}

/// Returns `true` if `def_id`'s path string contains the `check_def` filter.
fn matches_check_def(&self, def_id: DefId) -> bool {
    self.genv
        .tcx()
        .def_path_str(def_id)
        .contains(config::check_def())
}

/// Returns `true` if the `check_diff()` mode is off OR the file in which this `def_id`
/// is defined has been modified since the last check.
fn matches_check_diff(&mut self, def_id: LocalDefId) -> bool {
!config::check_diff() || self.query_cache.is_modified(&self.genv.tcx(), def_id)
}

fn check_def_catching_bugs(&mut self, def_id: LocalDefId) -> Result<(), ErrorGuaranteed> {
let mut this = std::panic::AssertUnwindSafe(self);
let msg = format!("def_id: {:?}, span: {:?}", def_id, this.genv.tcx().def_span(def_id));
Expand All @@ -168,10 +174,13 @@ impl<'genv, 'tcx> CrateChecker<'genv, 'tcx> {
if self.genv.ignored(def_id.local_id()) || self.genv.is_dummy(def_id.local_id()) {
return Ok(());
}
if !self.matches_check_diff(def_id.local_id()) {
return Ok(());
}

match self.genv.def_kind(def_id) {
DefKind::Fn | DefKind::AssocFn => {
refineck::check_fn(self.genv, &mut self.cache, def_id, self.checker_config)
refineck::check_fn(self.genv, &mut self.query_cache, def_id, self.checker_config)
}
DefKind::Enum => {
let adt_def = self.genv.adt_def(def_id).emit(&self.genv)?;
Expand All @@ -184,7 +193,7 @@ impl<'genv, 'tcx> CrateChecker<'genv, 'tcx> {
.expect_enum();
refineck::invariants::check_invariants(
self.genv,
&mut self.cache,
&mut self.query_cache,
def_id,
enum_def.invariants,
&adt_def,
Expand All @@ -205,7 +214,7 @@ impl<'genv, 'tcx> CrateChecker<'genv, 'tcx> {
}
refineck::invariants::check_invariants(
self.genv,
&mut self.cache,
&mut self.query_cache,
def_id,
struct_def.invariants,
&adt_def,
Expand Down
13 changes: 7 additions & 6 deletions crates/flux-infer/src/fixpoint_encoding.rs
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ use rustc_data_structures::{
};
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_index::newtype_index;
use rustc_middle::ty::TyCtxt;
use rustc_span::{Span, Symbol};
use rustc_type_ir::{BoundVar, DebruijnIndex};

Expand Down Expand Up @@ -337,6 +338,7 @@ where
// skip checking trivial constraints
if !constraint.is_concrete() {
self.ecx.errors.into_result()?;
cache.insert_safe_query(&self.genv.tcx(), self.def_id, None);
return Ok(vec![]);
}
let def_span = self.def_span();
Expand Down Expand Up @@ -384,9 +386,7 @@ where
dbg::dump_item_info(self.genv.tcx(), self.def_id, "smt2", &task).unwrap();
}

let task_key = self.genv.tcx().def_path_str(self.def_id);

match Self::run_task_with_cache(task, task_key, cache) {
match Self::run_task_with_cache(task, &self.genv.tcx(), self.def_id, cache) {
FixpointResult::Safe(_) => Ok(vec![]),
FixpointResult::Unsafe(_, errors) => {
Ok(errors
Expand All @@ -401,11 +401,12 @@ where

fn run_task_with_cache(
task: fixpoint::Task,
key: String,
tcx: &TyCtxt,
def_id: LocalDefId,
cache: &mut QueryCache,
) -> FixpointResult<TagIdx> {
let hash = task.hash_with_default();
if config::is_cache_enabled() && cache.is_safe(&key, hash) {
if config::is_cache_enabled() && cache.is_safe_query(tcx, def_id, hash) {
return FixpointResult::Safe(Default::default());
}

Expand All @@ -415,7 +416,7 @@ where

if config::is_cache_enabled() {
if let FixpointResult::Safe(_) = result {
cache.insert(key, hash);
cache.insert_safe_query(tcx, def_id, Some(hash));
}
}
result
Expand Down
8 changes: 8 additions & 0 deletions tools/vscode/.vscode/extensions.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
{
// See http://go.microsoft.com/fwlink/?LinkId=827846
// for the documentation about the extensions.json format
"recommendations": [
"dbaeumer.vscode-eslint",
"ms-vscode.extension-test-runner"
]
}
Loading
Loading