Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Lazy values #1386

Open
wants to merge 3 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
61 changes: 61 additions & 0 deletions core/src/ops/konst.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
use std::fmt::Debug;

use dyn_clone::DynClone;

use crate::internal::*;

#[derive(Debug, Clone, new, Hash, Eq, PartialEq)]
Expand Down Expand Up @@ -72,3 +76,60 @@ impl TypedOp for Const {
target.wire_node(&node.name, op, &[])
}
}

/// An operator producing a constant value that is materialized on demand by
/// its [`LazyConstProvider`], rather than being stored eagerly like `Const`.
#[derive(Debug, Clone, new)]
pub struct LazyConst(pub Arc<dyn LazyConstProvider>);

impl Op for LazyConst {
    fn name(&self) -> Cow<str> {
        Cow::from("LazyConst")
    }

    // Surface the wrapped provider's Debug representation as the op info line.
    fn info(&self) -> TractResult<Vec<String>> {
        let provider = format!("{:?}", self.0);
        Ok(vec![provider])
    }

    op_as_typed_op!();
}

impl EvalOp for LazyConst {
    // Not stateless: the value is produced at session time by the provider,
    // so evaluation has to go through an `OpState`.
    fn is_stateless(&self) -> bool {
        false
    }

    // The op acts as its own state: cloning is cheap (an `Arc` refcount bump)
    // and the clone carries everything needed to materialize the value.
    fn state(
        &self,
        _session: &mut SessionState,
        _node_id: usize,
    ) -> TractResult<Option<Box<dyn OpState>>> {
        Ok(Some(Box::new(self.clone())))
    }
}

impl OpState for LazyConst {
    // Ignores its inputs (a LazyConst node is wired with none) and delegates
    // the actual value production to the wrapped provider.
    fn eval(
        &mut self,
        _session: &mut SessionState,
        _op: &dyn Op,
        _inputs: TVec<TValue>,
    ) -> TractResult<TVec<TValue>> {
        Ok(tvec!(self.0.eval()?))
    }
}

// The state holds no session-dependent mutable data, so freezing is trivial.
trivial_op_state_freeeze!(LazyConst);

impl TypedOp for LazyConst {
    as_op!();

    // The output fact comes straight from the provider, which must know the
    // type and shape of the value it will eventually produce.
    fn output_facts(&self, _inputs: &[&TypedFact]) -> TractResult<TVec<TypedFact>> {
        Ok(tvec!(self.0.output_fact()?))
    }
}

/// Source of a lazily-materialized constant value.
///
/// Implementors announce the fact (type and shape) of the value up front via
/// `output_fact`, and produce the value itself on demand via `eval`.
pub trait LazyConstProvider: DynClone + Debug + Send + Sync + 'static {
    /// Type/shape information for the value, available without materializing it.
    fn output_fact(&self) -> TractResult<TypedFact>;
    /// Materialize the value.
    fn eval(&self) -> TractResult<TValue>;
}

// Make boxed `dyn LazyConstProvider` trait objects cloneable.
dyn_clone::clone_trait_object!(LazyConstProvider);
2 changes: 1 addition & 1 deletion libcli/src/terminal.rs
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,7 @@ fn render_node_prefixed(

// flops column
let mut flops_column = if options.profile && options.cost {
let timing: f64 = tags.profile.as_ref().unwrap().as_secs_f64();
let timing: f64 = tags.profile.as_ref().map(Duration::as_secs_f64).unwrap_or(0.0);
let flops_column_pad = flops_column_pad.clone();
let it = tags.cost.iter().map(move |c| {
if c.0.is_compute() {
Expand Down
1 change: 1 addition & 0 deletions nnef/nnef-resources/src/json_loader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ impl ResourceLoader for JsonLoader {
fn try_load(
&self,
path: &Path,
_lazy_data_provider: Option<LazyReader>,
reader: &mut dyn std::io::Read,
_framework: &tract_nnef::framework::Nnef,
) -> TractResult<Option<(String, Arc<dyn Resource>)>> {
Expand Down
21 changes: 21 additions & 0 deletions nnef/src/ast.rs
Original file line number Diff line number Diff line change
@@ -1,15 +1,36 @@
use std::fs::File;
use std::io::Read;
use std::path::PathBuf;

use crate::internal::*;
use tract_itertools::Itertools;
use crate::resource::LazyDat;

pub mod dump;
pub mod dump_doc;
pub mod parse;
pub mod quant;

/// Handle to tensor data that can be (re-)opened lazily, after initial model
/// loading. Currently only backed by a file on disk.
#[derive(Clone,Debug)]
pub enum LazyReader {
    File(PathBuf),
}

impl LazyReader {
    /// Open a fresh reader over the lazily-referenced data.
    pub fn read(&self) -> TractResult<Box<dyn Read>> {
        // Single-variant enum: the pattern is irrefutable.
        let LazyReader::File(p) = self;
        let file = File::open(p).with_context(|| format!("Opening {p:?}"))?;
        Ok(Box::new(file))
    }
}

#[derive(Clone, Debug)]
pub struct ProtoModel {
    /// Parsed NNEF graph document.
    pub doc: Document,
    /// Tensors fully loaded in memory, keyed by identifier.
    pub tensors: HashMap<Identifier, Arc<Tensor>>,
    /// Tensors whose data will be read on demand (see `LazyDat`).
    pub lazy_tensors: HashMap<Identifier, Arc<LazyDat>>,
    /// Optional per-identifier quantization information.
    pub quantization: Option<HashMap<Identifier, QuantFormat>>,
    /// Remaining named resources attached to the model.
    pub resources: HashMap<String, Arc<dyn Resource>>,
}
Expand Down
104 changes: 51 additions & 53 deletions nnef/src/framework.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@ use tar::Builder;
use tract_core::tract_data::itertools::Itertools;

use crate::ast::quant::write_quant_format;
use crate::ast::{Document, Identifier, ProtoModel, QuantFormat};
use crate::ast::{Document, Identifier, LazyReader, ProtoModel, QuantFormat};
use crate::resource::{LazyDat, LazyDatLoader};
use crate::{internal::*, nnef};
use std::io::Read;
#[cfg(target_family = "unix")]
Expand All @@ -27,6 +28,7 @@ impl Default for Nnef {
stdlib: stdlib(),
registries: vec![crate::ops::tract_nnef()],
resource_loaders: vec![
LazyDatLoader.into_boxed(),
GraphNnefLoader.into_boxed(),
DatLoader.into_boxed(),
GraphQuantLoader.into_boxed(),
Expand Down Expand Up @@ -267,7 +269,13 @@ impl tract_core::prelude::Framework<ProtoModel, TypedModel> for Nnef {
.skip(path.components().count())
.collect::<std::path::PathBuf>();
let mut stream = std::fs::File::open(entry.path())?;
read_stream(&subpath, &mut stream, &mut resources, self)?;
read_stream(
&subpath,
Some(LazyReader::File(entry.path().to_owned())),
&mut stream,
&mut resources,
self,
)?;
}
proto_model_from_resources(resources)
}
Expand All @@ -293,7 +301,7 @@ impl tract_core::prelude::Framework<ProtoModel, TypedModel> for Nnef {
for entry in tar.entries()? {
let mut entry = entry?;
let path = entry.path()?.to_path_buf();
read_stream(&path, &mut entry, &mut resources, self)?;
read_stream(&path, None, &mut entry, &mut resources, self)?;
}
proto_model_from_resources(resources)
}
Expand All @@ -313,7 +321,7 @@ fn proto_model_from_resources(
// Iter resources IDs to detect submodels. Submodels are IDs with
// - two path compoents (ex: XXX/file)
// - a graph.nnef file as filename
let sub_models = resources
let sub_model_ids = resources
.keys()
.clone()
.filter_map(|id| {
Expand All @@ -330,8 +338,8 @@ fn proto_model_from_resources(

// If there are submodels, we use the associated resources to create a TypedModel resource and add
// it as a new resource.
let mut new_resources = if sub_models.len() > 0 {
sub_models.into_iter().try_fold(resources, |r, it| -> TractResult<HashMap<_, _>> {
let new_resources = if sub_model_ids.len() > 0 {
sub_model_ids.into_iter().try_fold(resources, |r, it| -> TractResult<HashMap<_, _>> {
let (submodel_resources, mut resources): (HashMap<String, Arc<dyn Resource>>, _) =
r.into_iter().partition(|(k, _v)| k.starts_with(&it));
let submodel_resources = submodel_resources
Expand All @@ -347,56 +355,44 @@ fn proto_model_from_resources(
resources
};

// NNEF document extraction
let doc = new_resources
.remove(crate::resource::GRAPH_NNEF_FILENAME)
.with_context(|| {
anyhow!("Resource {} was not found in the model", crate::resource::GRAPH_NNEF_FILENAME)
})?
.downcast_arc::<Document>()
.map_err(|_| anyhow!("Error while downcasting NNEF document resource"))?;

let doc = Arc::try_unwrap(doc)
.map_err(|_| anyhow!("Error while extracting NNEF Document from shared reference. Only one reference to the document is expected"))?;

// Collect all resources that can be downcastable to Arc<Tensor>.
let tensors: HashMap<_, _> = new_resources
.iter()
.filter_map(|(key, resource)| {
Arc::clone(resource)
.downcast_arc::<Tensor>()
.ok()
.map(|r| (Identifier::from(&**key), r))
})
.collect();
// Iterate over tensors keys to remove them from the global resources hash map.
tensors.keys().for_each(|k| {
new_resources.remove(&*k.0);
});

// Quantization format resources extraction if present.
let quantization = if let Some(q_r) =
new_resources.remove(crate::resource::GRAPH_QUANT_FILENAME)
{
let Ok(q_r) = q_r.downcast_arc::<HashMap<String, QuantFormat>>() else {
bail!("Error while downcasting quantization format resource")
};
let Ok(q_r) = Arc::try_unwrap(q_r) else {
bail!("Error while extracting quantization format resource from shared reference. Only one reference to it is expected")
};
Some(q_r.into_iter().map(|(k, v)| (Identifier(k), v)).collect())
} else {
None
};
let mut resources = HashMap::default();
let mut tensors = HashMap::default();
let mut lazy_tensors = HashMap::default();
let mut doc: Option<Arc<Document>> = None;
let mut quantization = None;
for (k, res) in new_resources {
if let Ok(t) = res.clone().downcast_arc::<Tensor>() {
tensors.insert(Identifier(k), t);
} else if let Ok(t) = res.clone().downcast_arc::<LazyDat>() {
lazy_tensors.insert(Identifier(k), t);
} else if k == crate::resource::GRAPH_NNEF_FILENAME {
doc = Some(
res.downcast_arc::<Document>()
.map_err(|_| anyhow!("graph.nnef must be a Document"))?,
);
} else if k == crate::resource::GRAPH_QUANT_FILENAME {
let map = res
.downcast_arc::<HashMap<String, QuantFormat>>()
.map_err(|_| anyhow!("graph.quant must be quantization information"))?;
quantization =
Some(map.iter().map(|(k, v)| (Identifier::from(&**k), v.clone())).collect())
} else {
resources.insert(k, res);
}
}

let Some(doc) = doc else { bail!("Could not find graph.nnef") };
let doc = Arc::try_unwrap(doc).unwrap();

let proto = ProtoModel { doc, tensors, quantization, resources: new_resources };
let proto = ProtoModel { doc, tensors, lazy_tensors, quantization, resources };
proto.validate()?;
Ok(proto)
}

fn read_stream<R: std::io::Read>(
fn read_stream(
path: &Path,
reader: &mut R,
lazy_data_provider: Option<LazyReader>,
reader: &mut impl std::io::Read,
resources: &mut HashMap<String, Arc<dyn Resource>>,
framework: &Nnef,
) -> TractResult<()> {
Expand All @@ -408,9 +404,11 @@ fn read_stream<R: std::io::Read>(
let mut last_loader_name;
for loader in framework.resource_loaders.iter() {
last_loader_name = Some(loader.name());
let loaded = loader.try_load(path, reader, framework).with_context(|| {
anyhow!("Error while loading resource by {:?} at path {:?}", loader.name(), path)
})?;
let loaded = loader
.try_load(path, lazy_data_provider.clone(), reader, framework)
.with_context(|| {
anyhow!("Error while loading resource by {:?} at path {:?}", loader.name(), path)
})?;
if let Some((id, resource)) = loaded {
ensure!(
!resources.contains_key(&id),
Expand Down
5 changes: 3 additions & 2 deletions nnef/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,14 +25,15 @@ pub mod prelude {
pub mod internal {
pub use crate::ast::parse::parse_parameters;
pub use crate::ast::{
param, FragmentDecl, FragmentDef, Identifier, Parameter, RValue, TypeName,
param, FragmentDecl, FragmentDef, Identifier, LazyReader, Parameter, RValue, TypeName,
};
pub use crate::deser::{ModelBuilder, ResolvedInvocation, Value};
pub use crate::framework::Nnef;
pub use crate::prelude::*;
pub use crate::registry::*;
pub use crate::resource::{
DatLoader, GraphNnefLoader, GraphQuantLoader, Resource, ResourceLoader, TypedModelResource, TypedModelLoader,
DatLoader, GraphNnefLoader, GraphQuantLoader, Resource, ResourceLoader, TypedModelLoader,
TypedModelResource,
};
pub use crate::ser::{invocation, logical, numeric, string, IntoAst};
pub use std::any::TypeId;
Expand Down
48 changes: 28 additions & 20 deletions nnef/src/ops/nnef/deser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -39,42 +39,50 @@ pub fn external(builder: &mut ModelBuilder, invocation: &ResolvedInvocation) ->
pub fn variable(builder: &mut ModelBuilder, invocation: &ResolvedInvocation) -> TractResult<Value> {
let shape: TVec<usize> = invocation.named_arg_as(builder, "shape")?;
let label = Identifier(invocation.named_arg_as(builder, "label")?);
let sanitized_label = Identifier(label.0.trim_start_matches('/').to_owned());
let lazy_tensors = &builder.proto_model.lazy_tensors;
let tensors = &builder.proto_model.tensors;
let mut tensor = Arc::clone(
tensors
.get(&label)
.or_else(|| tensors.get(&Identifier(label.0.trim_start_matches('/').to_owned())))
.ok_or_else(|| format_err!("No data for tensor {:?}", label))?,
);

let mut wire = if let Some(t) = tensors.get(&label).or_else(|| tensors.get(&sanitized_label)) {
builder.wire_as_outlets(tract_core::ops::konst::Const::new(t.clone()), &[])?
} else if let Some(lt) = lazy_tensors.get(&label).or_else(|| lazy_tensors.get(&sanitized_label))
{
builder.wire_as_outlets(tract_core::ops::konst::LazyConst::new(lt.clone()), &[])?
} else {
bail!("No data for tensor {:?}", label)
};
let fact = builder.model.outlet_fact(wire[0])?;
if fact.shape.as_concrete().unwrap() != &*shape {
bail!(
"Wrong shape for tensor: {:?}, tensor file says {:?}, graph files says {:?}",
label,
fact.shape,
shape
);
}

if let Some(Some(dt)) = invocation.dt_from_quant_file.first() {
if dt.size_of() != tensor.datum_type().size_of() {
if dt.size_of() != fact.datum_type.size_of() {
bail!(
"Mismatched tensor type for tensor {}: expected {:?}, got {:?}",
label.0,
*dt,
tensor.datum_type()
fact.datum_type
);
}
if *dt != tensor.datum_type() {
if *dt != fact.datum_type {
trace!(
"Casting tensor {} from {:?} to {:?} when deserializing",
label.0,
tensor.datum_type(),
fact.datum_type,
*dt
);
//FIXME: avoid cast by late-loading tensors ?
tensor = tensor.cast_to_dt(*dt)?.into_owned().into_arc_tensor()
wire = builder.wire_as_outlets(cast(*dt), &wire)?;
}
}
if tensor.shape() != &*shape {
bail!(
"Wrong shape for tensor: {:?}, tensor file says {:?}, graph files says {:?}",
label,
tensor.shape(),
shape
);
}
builder.wire(tract_core::ops::konst::Const::new(tensor), &[])

Ok(wire.into())
}

// fragment reshape<?>( input: tensor<?>, shape: integer[], axis_start: integer = 0, axis_count: integer = -1 )
Expand Down
Loading
Loading