cleanup top level exports (#2382)
remove some top level exports
PSeitz authored May 7, 2024
1 parent 92b5526 commit eea7003
Showing 25 changed files with 59 additions and 59 deletions.
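
The pattern across these files is uniform: types that used to be re-exported from the crate root are now imported from the module that defines them. The snippet below is an illustrative migration sketch for downstream code, not part of the commit itself; the new paths are the ones used in the updated examples (tantivy::index, tantivy::postings, tantivy::schema).

#![allow(unused_imports)] // illustration only; nothing below is exercised

// Before this commit (crate-root re-exports):
// use tantivy::{DateTimePrecision, Postings, SegmentId, SegmentReader};

// After this commit (module paths, as used in the updated examples):
use tantivy::index::{SegmentId, SegmentReader};
use tantivy::postings::Postings;
use tantivy::schema::DateTimePrecision;

fn main() {}
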
3 changes: 2 additions & 1 deletion examples/custom_collector.rs
@@ -11,9 +11,10 @@ use columnar::Column;
// ---
// Importing tantivy...
use tantivy::collector::{Collector, SegmentCollector};
+use tantivy::index::SegmentReader;
use tantivy::query::QueryParser;
use tantivy::schema::{Schema, FAST, INDEXED, TEXT};
-use tantivy::{doc, Index, IndexWriter, Score, SegmentReader};
+use tantivy::{doc, Index, IndexWriter, Score};

#[derive(Default)]
struct Stats {
2 changes: 1 addition & 1 deletion examples/date_time_field.rs
@@ -13,7 +13,7 @@ fn main() -> tantivy::Result<()> {
let opts = DateOptions::from(INDEXED)
.set_stored()
.set_fast()
-.set_precision(tantivy::DateTimePrecision::Seconds);
+.set_precision(tantivy::schema::DateTimePrecision::Seconds);
// Add `occurred_at` date field type
let occurred_at = schema_builder.add_date_field("occurred_at", opts);
let event_type = schema_builder.add_text_field("event", STRING | STORED);
3 changes: 2 additions & 1 deletion examples/iterating_docs_and_positions.rs
@@ -7,10 +7,11 @@
// the list of documents containing a term, getting
// its term frequency, and accessing its positions.

+use tantivy::postings::Postings;
// ---
// Importing tantivy...
use tantivy::schema::*;
-use tantivy::{doc, DocSet, Index, IndexWriter, Postings, TERMINATED};
+use tantivy::{doc, DocSet, Index, IndexWriter, TERMINATED};

fn main() -> tantivy::Result<()> {
// We first create a schema for the sake of the
3 changes: 2 additions & 1 deletion examples/warmer.rs
@@ -3,10 +3,11 @@ use std::collections::{HashMap, HashSet};
use std::sync::{Arc, RwLock, Weak};

use tantivy::collector::TopDocs;
+use tantivy::index::SegmentId;
use tantivy::query::QueryParser;
use tantivy::schema::{Schema, FAST, TEXT};
use tantivy::{
-doc, DocAddress, DocId, Index, IndexWriter, Opstamp, Searcher, SearcherGeneration, SegmentId,
+doc, DocAddress, DocId, Index, IndexWriter, Opstamp, Searcher, SearcherGeneration,
SegmentReader, Warmer,
};

3 changes: 2 additions & 1 deletion src/aggregation/agg_req_with_accessor.rs
@@ -17,7 +17,8 @@ use super::metric::{
use super::segment_agg_result::AggregationLimits;
use super::VecWithNames;
use crate::aggregation::{f64_to_fastfield_u64, Key};
-use crate::{SegmentOrdinal, SegmentReader};
+use crate::index::SegmentReader;
+use crate::SegmentOrdinal;

#[derive(Default)]
pub(crate) struct AggregationsWithAccessor {
3 changes: 2 additions & 1 deletion src/aggregation/collector.rs
@@ -8,7 +8,8 @@ use super::segment_agg_result::{
};
use crate::aggregation::agg_req_with_accessor::get_aggs_with_segment_accessor_and_validate;
use crate::collector::{Collector, SegmentCollector};
-use crate::{DocId, SegmentOrdinal, SegmentReader, TantivyError};
+use crate::index::SegmentReader;
+use crate::{DocId, SegmentOrdinal, TantivyError};

/// The default max bucket count, before the aggregation fails.
pub const DEFAULT_BUCKET_LIMIT: u32 = 65000;
3 changes: 2 additions & 1 deletion src/collector/top_collector.rs
@@ -4,7 +4,8 @@ use std::marker::PhantomData;
use serde::{Deserialize, Serialize};

use super::top_score_collector::TopNComputer;
-use crate::{DocAddress, DocId, SegmentOrdinal, SegmentReader};
+use crate::index::SegmentReader;
+use crate::{DocAddress, DocId, SegmentOrdinal};

/// Contains a feature (field, score, etc.) of a document along with the document address.
///
4 changes: 2 additions & 2 deletions src/core/searcher.rs
@@ -4,13 +4,13 @@ use std::{fmt, io};

use crate::collector::Collector;
use crate::core::Executor;
-use crate::index::SegmentReader;
+use crate::index::{SegmentId, SegmentReader};
use crate::query::{Bm25StatisticsProvider, EnableScoring, Query};
use crate::schema::document::DocumentDeserialize;
use crate::schema::{Schema, Term};
use crate::space_usage::SearcherSpaceUsage;
use crate::store::{CacheStats, StoreReader};
-use crate::{DocAddress, Index, Opstamp, SegmentId, TrackedObject};
+use crate::{DocAddress, Index, Opstamp, TrackedObject};

/// Identifies the searcher generation accessed by a [`Searcher`].
///
6 changes: 4 additions & 2 deletions src/core/tests.rs
@@ -1,12 +1,14 @@
use crate::collector::Count;
use crate::directory::{RamDirectory, WatchCallback};
+use crate::index::SegmentId;
use crate::indexer::{LogMergePolicy, NoMergePolicy};
+use crate::postings::Postings;
use crate::query::TermQuery;
use crate::schema::{Field, IndexRecordOption, Schema, INDEXED, STRING, TEXT};
use crate::tokenizer::TokenizerManager;
use crate::{
-Directory, DocSet, Index, IndexBuilder, IndexReader, IndexSettings, IndexWriter, Postings,
-ReloadPolicy, SegmentId, TantivyDocument, Term,
+Directory, DocSet, Index, IndexBuilder, IndexReader, IndexSettings, IndexWriter, ReloadPolicy,
+TantivyDocument, Term,
};

#[test]
9 changes: 5 additions & 4 deletions src/fastfield/mod.rs
@@ -80,22 +80,23 @@ mod tests {
use std::path::Path;

use columnar::StrColumn;
-use common::{ByteCount, HasLen, TerminatingWrite};
+use common::{ByteCount, DateTimePrecision, HasLen, TerminatingWrite};
use once_cell::sync::Lazy;
use rand::prelude::SliceRandom;
use rand::rngs::StdRng;
use rand::{Rng, SeedableRng};

use super::*;
use crate::directory::{Directory, RamDirectory, WritePtr};
+use crate::index::SegmentId;
use crate::merge_policy::NoMergePolicy;
use crate::schema::{
-Facet, FacetOptions, Field, JsonObjectOptions, Schema, SchemaBuilder, TantivyDocument,
-TextOptions, FAST, INDEXED, STORED, STRING, TEXT,
+DateOptions, Facet, FacetOptions, Field, JsonObjectOptions, Schema, SchemaBuilder,
+TantivyDocument, TextOptions, FAST, INDEXED, STORED, STRING, TEXT,
};
use crate::time::OffsetDateTime;
use crate::tokenizer::{LowerCaser, RawTokenizer, TextAnalyzer, TokenizerManager};
-use crate::{DateOptions, DateTimePrecision, Index, IndexWriter, SegmentId, SegmentReader};
+use crate::{Index, IndexWriter, SegmentReader};

pub static SCHEMA: Lazy<Schema> = Lazy::new(|| {
let mut schema_builder = Schema::builder();
4 changes: 2 additions & 2 deletions src/fastfield/writer.rs
@@ -1,14 +1,14 @@
use std::io;

use columnar::{ColumnarWriter, NumericalValue};
-use common::JsonPathWriter;
+use common::{DateTimePrecision, JsonPathWriter};
use tokenizer_api::Token;

use crate::indexer::doc_id_mapping::DocIdMapping;
use crate::schema::document::{Document, ReferenceValue, ReferenceValueLeaf, Value};
use crate::schema::{value_type_to_column_type, Field, FieldType, Schema, Type};
use crate::tokenizer::{TextAnalyzer, TokenizerManager};
-use crate::{DateTimePrecision, DocId, TantivyError};
+use crate::{DocId, TantivyError};

/// Only index JSON down to a depth of 20.
/// This is mostly to guard us from a stack overflow triggered by malicious input.
2 changes: 0 additions & 2 deletions src/index/mod.rs
@@ -1,5 +1,3 @@
-//! # Index Module
-//!
//! The `index` module in Tantivy contains core components to read and write indexes.
//!
//! It contains `Index` and `Segment`, where a `Index` consists of one or more `Segment`s.
3 changes: 2 additions & 1 deletion src/indexer/delete_queue.rs
@@ -246,8 +246,9 @@ impl DeleteCursor {
mod tests {

use super::{DeleteOperation, DeleteQueue};
+use crate::index::SegmentReader;
use crate::query::{Explanation, Scorer, Weight};
-use crate::{DocId, Score, SegmentReader};
+use crate::{DocId, Score};

struct DummyWeight;
impl Weight for DummyWeight {
4 changes: 2 additions & 2 deletions src/indexer/log_merge_policy.rs
@@ -144,9 +144,9 @@ mod tests {
use once_cell::sync::Lazy;

use super::*;
-use crate::index::SegmentMetaInventory;
+use crate::index::{SegmentId, SegmentMetaInventory};
+use crate::schema;
use crate::schema::INDEXED;
-use crate::{schema, SegmentId};

static INVENTORY: Lazy<SegmentMetaInventory> = Lazy::new(SegmentMetaInventory::default);

3 changes: 2 additions & 1 deletion src/indexer/merge_operation.rs
@@ -1,7 +1,8 @@
use std::collections::HashSet;
use std::ops::Deref;

-use crate::{Inventory, Opstamp, SegmentId, TrackedObject};
+use crate::index::SegmentId;
+use crate::{Inventory, Opstamp, TrackedObject};

#[derive(Default)]
pub(crate) struct MergeOperationInventory(Inventory<InnerMergeOperation>);
9 changes: 4 additions & 5 deletions src/indexer/merger.rs
@@ -13,16 +13,15 @@ use crate::docset::{DocSet, TERMINATED};
use crate::error::DataCorruption;
use crate::fastfield::{AliveBitSet, FastFieldNotAvailableError};
use crate::fieldnorm::{FieldNormReader, FieldNormReaders, FieldNormsSerializer, FieldNormsWriter};
-use crate::index::{Segment, SegmentReader};
+use crate::index::{Segment, SegmentComponent, SegmentReader};
use crate::indexer::doc_id_mapping::{MappingType, SegmentDocIdMapping};
use crate::indexer::SegmentSerializer;
use crate::postings::{InvertedIndexSerializer, Postings, SegmentPostings};
use crate::schema::{value_type_to_column_type, Field, FieldType, Schema};
use crate::store::StoreWriter;
use crate::termdict::{TermMerger, TermOrdinal};
use crate::{
-DocAddress, DocId, IndexSettings, IndexSortByField, InvertedIndexReader, Order,
-SegmentComponent, SegmentOrdinal,
+DocAddress, DocId, IndexSettings, IndexSortByField, InvertedIndexReader, Order, SegmentOrdinal,
};

/// Segment's max doc must be `< MAX_DOC_LIMIT`.
@@ -794,7 +793,7 @@ mod tests {
BytesFastFieldTestCollector, FastFieldTestCollector, TEST_COLLECTOR_WITH_SCORE,
};
use crate::collector::{Count, FacetCollector};
-use crate::index::Index;
+use crate::index::{Index, SegmentId};
use crate::query::{AllQuery, BooleanQuery, EnableScoring, Scorer, TermQuery};
use crate::schema::document::Value;
use crate::schema::{
@@ -804,7 +803,7 @@
use crate::time::OffsetDateTime;
use crate::{
assert_nearly_equals, schema, DateTime, DocAddress, DocId, DocSet, IndexSettings,
-IndexSortByField, IndexWriter, Order, Searcher, SegmentId,
+IndexSortByField, IndexWriter, Order, Searcher,
};

#[test]
5 changes: 3 additions & 2 deletions src/indexer/merger_sorted_index_test.rs
@@ -3,15 +3,16 @@ mod tests {
use crate::collector::TopDocs;
use crate::fastfield::AliveBitSet;
use crate::index::Index;
+use crate::postings::Postings;
use crate::query::QueryParser;
use crate::schema::document::Value;
use crate::schema::{
self, BytesOptions, Facet, FacetOptions, IndexRecordOption, NumericOptions,
TextFieldIndexing, TextOptions,
};
use crate::{
-DocAddress, DocSet, IndexSettings, IndexSortByField, IndexWriter, Order, Postings,
-TantivyDocument, Term,
+DocAddress, DocSet, IndexSettings, IndexSortByField, IndexWriter, Order, TantivyDocument,
+Term,
};

fn create_test_index_posting_list_issue(index_settings: Option<IndexSettings>) -> Index {
10 changes: 5 additions & 5 deletions src/indexer/segment_writer.rs
@@ -7,7 +7,7 @@ use super::doc_id_mapping::{get_doc_id_mapping_from_field, DocIdMapping};
use super::operation::AddOperation;
use crate::fastfield::FastFieldsWriter;
use crate::fieldnorm::{FieldNormReaders, FieldNormsWriter};
-use crate::index::Segment;
+use crate::index::{Segment, SegmentComponent};
use crate::indexer::segment_serializer::SegmentSerializer;
use crate::json_utils::{index_json_value, IndexingPositionsPerPath};
use crate::postings::{
@@ -18,7 +18,7 @@ use crate::schema::document::{Document, Value};
use crate::schema::{FieldEntry, FieldType, Schema, Term, DATE_TIME_PRECISION_INDEXED};
use crate::store::{StoreReader, StoreWriter};
use crate::tokenizer::{FacetTokenizer, PreTokenizedStream, TextAnalyzer, Tokenizer};
-use crate::{DocId, Opstamp, SegmentComponent, TantivyError};
+use crate::{DocId, Opstamp, TantivyError};

/// Computes the initial size of the hash table.
///
@@ -498,7 +498,7 @@ mod tests {
use crate::collector::{Count, TopDocs};
use crate::directory::RamDirectory;
use crate::fastfield::FastValue;
-use crate::postings::TermInfo;
+use crate::postings::{Postings, TermInfo};
use crate::query::{PhraseQuery, QueryParser};
use crate::schema::document::Value;
use crate::schema::{
@@ -510,8 +510,8 @@
use crate::time::OffsetDateTime;
use crate::tokenizer::{PreTokenizedString, Token};
use crate::{
-DateTime, Directory, DocAddress, DocSet, Index, IndexWriter, Postings, TantivyDocument,
-Term, TERMINATED,
+DateTime, Directory, DocAddress, DocSet, Index, IndexWriter, TantivyDocument, Term,
+TERMINATED,
};

#[test]
20 changes: 5 additions & 15 deletions src/lib.rs
@@ -216,28 +216,17 @@ use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};

pub use self::docset::{DocSet, COLLECT_BLOCK_BUFFER_LEN, TERMINATED};
-#[deprecated(
-since = "0.22.0",
-note = "Will be removed in tantivy 0.23. Use export from snippet module instead"
-)]
-pub use self::snippet::{Snippet, SnippetGenerator};
#[doc(hidden)]
pub use crate::core::json_utils;
pub use crate::core::{Executor, Searcher, SearcherGeneration};
pub use crate::directory::Directory;
#[allow(deprecated)] // Remove with index sorting
pub use crate::index::{
Index, IndexBuilder, IndexMeta, IndexSettings, IndexSortByField, InvertedIndexReader, Order,
-Segment, SegmentComponent, SegmentId, SegmentMeta, SegmentReader,
+Segment, SegmentMeta, SegmentReader,
};
-#[deprecated(
-since = "0.22.0",
-note = "Will be removed in tantivy 0.23. Use export from indexer module instead"
-)]
-pub use crate::indexer::PreparedCommit;
pub use crate::indexer::{IndexWriter, SingleSegmentIndexWriter};
-pub use crate::postings::Postings;
-pub use crate::schema::{DateOptions, DateTimePrecision, Document, TantivyDocument, Term};
+pub use crate::schema::{Document, TantivyDocument, Term};

/// Index format version.
const INDEX_FORMAT_VERSION: u32 = 6;
@@ -392,9 +381,10 @@ pub mod tests {
use crate::docset::{DocSet, TERMINATED};
use crate::index::SegmentReader;
use crate::merge_policy::NoMergePolicy;
+use crate::postings::Postings;
use crate::query::BooleanQuery;
use crate::schema::*;
-use crate::{DateTime, DocAddress, Index, IndexWriter, Postings, ReloadPolicy};
+use crate::{DateTime, DocAddress, Index, IndexWriter, ReloadPolicy};

pub fn fixed_size_test<O: BinarySerializable + FixedSize + Default>() {
let mut buffer = Vec::new();
@@ -1109,9 +1099,9 @@
#[test]
fn test_update_via_delete_insert() -> crate::Result<()> {
use crate::collector::Count;
+use crate::index::SegmentId;
use crate::indexer::NoMergePolicy;
use crate::query::AllQuery;
-use crate::SegmentId;

const DOC_COUNT: u64 = 2u64;

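
For reference, the re-exports dropped from lib.rs above remain available under their defining modules: the removed deprecation notes point at the snippet and indexer modules, and the other hunks in this commit use the index and schema paths. A hedged summary sketch follows (paths inferred from this diff, not an exhaustive listing):

#![allow(unused_imports)] // summary sketch only; paths inferred from the removals above

use tantivy::index::SegmentComponent;              // was tantivy::SegmentComponent
use tantivy::indexer::PreparedCommit;              // was tantivy::PreparedCommit (deprecated re-export, now dropped)
use tantivy::schema::DateOptions;                  // was tantivy::DateOptions
use tantivy::snippet::{Snippet, SnippetGenerator}; // were tantivy::Snippet / tantivy::SnippetGenerator

fn main() {}
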
2 changes: 1 addition & 1 deletion src/postings/serializer.rs
@@ -56,7 +56,7 @@ pub struct InvertedIndexSerializer {
impl InvertedIndexSerializer {
/// Open a new `InvertedIndexSerializer` for the given segment
pub fn open(segment: &mut Segment) -> crate::Result<InvertedIndexSerializer> {
-use crate::SegmentComponent::{Positions, Postings, Terms};
+use crate::index::SegmentComponent::{Positions, Postings, Terms};
let inv_index_serializer = InvertedIndexSerializer {
terms_write: CompositeWrite::wrap(segment.open_write(Terms)?),
postings_write: CompositeWrite::wrap(segment.open_write(Postings)?),
3 changes: 2 additions & 1 deletion src/query/empty_query.rs
@@ -1,8 +1,9 @@
use super::Scorer;
use crate::docset::TERMINATED;
+use crate::index::SegmentReader;
use crate::query::explanation::does_not_match;
use crate::query::{EnableScoring, Explanation, Query, Weight};
-use crate::{DocId, DocSet, Score, Searcher, SegmentReader};
+use crate::{DocId, DocSet, Score, Searcher};

/// `EmptyQuery` is a dummy `Query` in which no document matches.
///
4 changes: 2 additions & 2 deletions src/query/term_query/term_scorer.rs
@@ -127,15 +127,15 @@ impl Scorer for TermScorer {
mod tests {
use proptest::prelude::*;

+use crate::index::SegmentId;
use crate::indexer::index_writer::MEMORY_BUDGET_NUM_BYTES_MIN;
use crate::merge_policy::NoMergePolicy;
use crate::postings::compression::COMPRESSION_BLOCK_SIZE;
use crate::query::term_query::TermScorer;
use crate::query::{Bm25Weight, EnableScoring, Scorer, TermQuery};
use crate::schema::{IndexRecordOption, Schema, TEXT};
use crate::{
-assert_nearly_equals, DocId, DocSet, Index, IndexWriter, Score, Searcher, SegmentId, Term,
-TERMINATED,
+assert_nearly_equals, DocId, DocSet, Index, IndexWriter, Score, Searcher, Term, TERMINATED,
};

#[test]
3 changes: 2 additions & 1 deletion src/reader/warming.rs
@@ -179,9 +179,10 @@ mod tests {
use super::Warmer;
use crate::core::searcher::SearcherGeneration;
use crate::directory::RamDirectory;
+use crate::index::SegmentId;
use crate::indexer::index_writer::MEMORY_BUDGET_NUM_BYTES_MIN;
use crate::schema::{Schema, INDEXED};
-use crate::{Index, IndexSettings, ReloadPolicy, Searcher, SegmentId};
+use crate::{Index, IndexSettings, ReloadPolicy, Searcher};

#[derive(Default)]
struct TestWarmer {
[2 of the 25 changed files are not shown above]
