diff --git a/README.md b/README.md index 67c879b..5ca2954 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # obj-simplify -There are a lot of authoring tools that produce OBJ files. The [spec](http://www.martinreddy.net/gfx/3d/OBJ.spec) is quite simple, but it still leaves a lot of room to export geometry and meshe/material combos that are not always optimal for 3D rendering engines. Artists and the exporters can also omit doing simple cleanup operations that would reduce file size, making loading and rendering the model faster. +There are a lot of authoring tools that produce OBJ files. The [spec](http://www.martinreddy.net/gfx/3d/OBJ.spec) is quite simple, but it still leaves a lot of room to export geometry and mesh/material combos that are not always optimal for 3D rendering engines. Artists and the exporters can also omit doing simple cleanup operations that would reduce file size, making loading and rendering the model faster. The biggest problem in an average OBJ export is the amount of draw calls that can be reduced trivially, but is rarely done in the authoring tool. @@ -14,12 +14,12 @@ This tool automates the following optimization and simplification steps. This tool can be destructive and contain bugs, it will not let you overwrite the source file. Keep your original files intact. The implementation does not support all the OBJ features out there. It is meant to be used on 3D-models that declare faces with `f`. All variants of face declarations in the spec are supported. Lines `l` and points `p` are also preserved and the same deduplication logic is applied to them. - + If a particular line in the input file is not supported by the parser, the tool will exit and print a link to submit an issue. If you are submitting an issue please attach a file that can reproduce the bug. ## Merging duplicate geometry -Use `-eplison` to tune vector equality checks, the default is `1e-6`. This can have a positive impact especially on large OBJ files. 
Basic cleanup like trimming trailing zeros and converting -0 into 0 to reduce file size is also executed. +Use `-epsilon` to tune vector equality checks, the default is `1e-6`. This can have a positive impact especially on large OBJ files. Basic cleanup like trimming trailing zeros and converting -0 into 0 to reduce file size is also executed. ## Object merging and multi-materials @@ -33,14 +33,14 @@ All found geometry from the source file is written at the top of the file, skipp ## three.js -I have contributed to the OBJ parser/loader in three.js and know it very well. I know what kind of files it has performance problems with and how to try to avoid them. I have also implemented some of the optimization done in this tool in JS on the client side, after the model has been loaded. But even if doable, its a waste of time to do them on each load for each user. Also certain optimizations can not be done on the client side. That being said there is nothing spesific in the tool for three.js, it can help as much in other rendering engines. This tool can help you get: +I have contributed to the OBJ parser/loader in three.js and know it very well. I know what kind of files it has performance problems with and how to try to avoid them. I have also implemented some of the optimization done in this tool in JS on the client side, after the model has been loaded. But even if doable, it's a waste of time to do them on each load for each user. Also certain optimizations can not be done on the client side. That being said there is nothing specific in the tool for three.js, it can help as much in other rendering engines. This tool can help you get: * Faster load over the network * Reduce filesize, possibly better compression e.g. with gzip (see `-gzip`). * Faster loading by the parser * Drop duplicates, reduce files size in general to parse less lines. - * Arraging file output in a way that *might* benefit V8 etc. to optimize the execution better. 
-* Faster rendering + * Arranging file output in a way that *might* benefit V8 etc. to optimize the execution better. +* Faster rendering * Remove patterns that result in using `THREE.MultiMaterial`. * Reduce draw calls. @@ -72,7 +72,7 @@ obj-simplify { "Output": "test.simplified.obj", "Workers": 32, "Gzip": -1, - "Eplison": 1e-06, + "Epsilon": 1e-06, "Strict": false, "Stdout": false, "Quiet": false, diff --git a/main.go b/main.go index d693ae3..81c442f 100644 --- a/main.go +++ b/main.go @@ -17,7 +17,7 @@ import ( var ( StartParams = startParams{ Gzip: -1, - Eplison: 1e-6, + Epsilon: 1e-6, } ApplicationName = "obj-simplify" @@ -38,7 +38,7 @@ type startParams struct { Workers int Gzip int - Eplison float64 + Epsilon float64 Strict bool Stdout bool @@ -68,8 +68,8 @@ func init() { "workers", StartParams.Workers, "Number of worker goroutines.") flag.IntVar(&StartParams.Gzip, "gzip", StartParams.Gzip, "Gzip compression level on the output for both -stdout and -out. <=0 disables compression, use 1 (best speed) to 9 (best compression) to enable.") - flag.Float64Var(&StartParams.Eplison, - "epsilon", StartParams.Eplison, "Epsilon for float comparisons.") + flag.Float64Var(&StartParams.Epsilon, + "epsilon", StartParams.Epsilon, "Epsilon for float comparisons.") flag.BoolVar(&StartParams.Strict, "strict", StartParams.Strict, "Errors out on spec violations, otherwise continues if the error is recoverable.") diff --git a/process-duplicates.go b/process-duplicates.go index 362b1f7..1a3bd63 100644 --- a/process-duplicates.go +++ b/process-duplicates.go @@ -113,7 +113,7 @@ func (r *replacer) Merge(other *replacer) { if r.hasItems && r.replaces[value.Index] != nil { // straight up duplicate other.Remove(value.Index) - } else if r.ref.Equals(value, StartParams.Eplison) { + } else if r.ref.Equals(value, StartParams.Epsilon) { // move equals hit to r from other r.Hit(value) other.Remove(value.Index) @@ -167,7 +167,7 @@ func (processor Duplicates) Execute(obj *objectfile.OBJ) error { 
mResults = sync.RWMutex{} wg = &sync.WaitGroup{} preStats = obj.Geometry.Stats() - epsilon = StartParams.Eplison + epsilon = StartParams.Epsilon progressEnabled = !StartParams.NoProgress ) @@ -369,7 +369,7 @@ func findDuplicates(t objectfile.Type, slice []*objectfile.GeometryValue, epsilo // r1 geom value equals r2. // only merge r2 hits where value equals r1, otherwise // we would do transitive merges which is not what we want: - // eg. r1 closer than eplison to r2, but r1 further than epsilon to r2.hitN + // eg. r1 closer than epsilon to r2, but r1 further than epsilon to r2.hitN r1.Merge(r2) // r1 might now be empty if r2 was its only hit, // and it was not completely merged.