Skip to content

Commit

Permalink
build(deps): update noodles to 0.60 and fix new clippy warnings
Browse files Browse the repository at this point in the history
  • Loading branch information
mmalenic committed Jan 2, 2024
1 parent 64a3ddc commit 442c195
Show file tree
Hide file tree
Showing 11 changed files with 629 additions and 455 deletions.
1,002 changes: 581 additions & 421 deletions Cargo.lock

Large diffs are not rendered by default.

3 changes: 1 addition & 2 deletions htsget-actix/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -201,8 +201,7 @@ mod tests {
#[async_trait(?Send)]
impl TestServer<ActixTestRequest<test::TestRequest>> for ActixTestServer {
async fn get_expected_path(&self) -> String {
let mut bind_data_server =
BindDataServer::try_from(self.get_config().data_server().clone()).unwrap();
let mut bind_data_server = BindDataServer::from(self.get_config().data_server().clone());
let server = bind_data_server.bind_data_server().await.unwrap();
let addr = server.local_addr();

Expand Down
2 changes: 1 addition & 1 deletion htsget-config/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ default = []
[dependencies]
thiserror = "1.0"
async-trait = "0.1"
noodles = { version = "0.50", features = ["core"] }
noodles = { version = "0.60", features = ["core"] }
serde = { version = "1.0", features = ["derive"] }
serde_with = "3.0"
serde_regex = "1.1"
Expand Down
2 changes: 1 addition & 1 deletion htsget-lambda/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -34,5 +34,5 @@ bytes = "1.4"

[dev-dependencies]
async-trait = "0.1"
query_map = { version = "0.6", features = ["url-query"] }
query_map = { version = "0.7", features = ["url-query"] }
tempfile = "3.6"
2 changes: 1 addition & 1 deletion htsget-lambda/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -708,7 +708,7 @@ mod tests {
}

async fn spawn_server(config: &Config) -> String {
let mut bind_data_server = BindDataServer::try_from(config.data_server().clone()).unwrap();
let mut bind_data_server = BindDataServer::from(config.data_server().clone());
let server = bind_data_server.bind_data_server().await.unwrap();
let addr = server.local_addr();

Expand Down
2 changes: 1 addition & 1 deletion htsget-search/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ futures-util = "0.3"
async-trait = "0.1"

# Noodles
noodles = { version = "0.50", features = ["async", "core", "bgzf", "bam", "bcf", "cram", "csi", "sam", "tabix", "vcf"] }
noodles = { version = "0.60", features = ["async", "core", "bgzf", "bam", "bcf", "cram", "csi", "sam", "tabix", "vcf"] }

# Amazon S3
bytes = { version = "1.4", optional = true }
Expand Down
17 changes: 10 additions & 7 deletions htsget-search/src/htsget/bam_search.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,12 @@ use std::sync::Arc;
use async_trait::async_trait;
use noodles::bam;
use noodles::bam::bai;
use noodles::bam::bai::Index;
use noodles::bgzf;
use noodles::bgzf::VirtualPosition;
use noodles::csi::index::ReferenceSequence;
use noodles::csi::Index;
use noodles::csi::binning_index::index::reference_sequence::index::LinearIndex;
use noodles::csi::binning_index::index::ReferenceSequence;
use noodles::csi::BinningIndex;
use noodles::sam::header::record::value::map::read_group::platform::ParseError;
use noodles::sam::header::record::value::map::read_group::Platform;
use noodles::sam::Header;
Expand All @@ -35,7 +37,8 @@ pub struct BamSearch<S> {
}

#[async_trait]
impl<S, ReaderType> BgzfSearch<S, ReaderType, AsyncReader<ReaderType>, Header> for BamSearch<S>
impl<S, ReaderType> BgzfSearch<S, LinearIndex, ReaderType, AsyncReader<ReaderType>, Header>
for BamSearch<S>
where
S: Storage<Streamable = ReaderType> + Send + Sync + 'static,
ReaderType: AsyncRead + Unpin + Send + Sync,
Expand All @@ -47,7 +50,7 @@ where
index: &Index,
) -> Result<Vec<BytesPosition>> {
trace!("getting byte ranges for unmapped reads");
let last_interval = index.first_record_in_last_linear_bin_start_position();
let last_interval = index.last_first_record_start_position();
let start = match last_interval {
Some(start) => start,
None => {
Expand Down Expand Up @@ -78,7 +81,8 @@ where
}

#[async_trait]
impl<S, ReaderType> Search<S, ReaderType, ReferenceSequence, Index, AsyncReader<ReaderType>, Header>
impl<S, ReaderType>
Search<S, ReaderType, ReferenceSequence<LinearIndex>, Index, AsyncReader<ReaderType>, Header>
for BamSearch<S>
where
S: Storage<Streamable = ReaderType> + Send + Sync + 'static,
Expand Down Expand Up @@ -121,7 +125,6 @@ where

async fn read_index_inner<T: AsyncRead + Unpin + Send>(inner: T) -> io::Result<Index> {
let mut reader = bai::AsyncReader::new(BufReader::new(inner));
reader.read_header().await?;
reader.read_index().await
}

Expand Down Expand Up @@ -150,7 +153,7 @@ where

#[async_trait]
impl<S, ReaderType>
SearchReads<S, ReaderType, ReferenceSequence, Index, AsyncReader<ReaderType>, Header>
SearchReads<S, ReaderType, ReferenceSequence<LinearIndex>, Index, AsyncReader<ReaderType>, Header>
for BamSearch<S>
where
S: Storage<Streamable = ReaderType> + Send + Sync + 'static,
Expand Down
9 changes: 6 additions & 3 deletions htsget-search/src/htsget/bcf_search.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,8 @@ use async_trait::async_trait;
use futures_util::stream::FuturesOrdered;
use noodles::bcf;
use noodles::bgzf::VirtualPosition;
use noodles::csi::index::ReferenceSequence;
use noodles::csi::binning_index::index::reference_sequence::index::BinnedIndex;
use noodles::csi::binning_index::index::ReferenceSequence;
use noodles::csi::Index;
use noodles::vcf::Header;
use noodles::{bgzf, csi};
Expand All @@ -28,7 +29,8 @@ pub struct BcfSearch<S> {
}

#[async_trait]
impl<S, ReaderType> BgzfSearch<S, ReaderType, AsyncReader<ReaderType>, Header> for BcfSearch<S>
impl<S, ReaderType> BgzfSearch<S, BinnedIndex, ReaderType, AsyncReader<ReaderType>, Header>
for BcfSearch<S>
where
S: Storage<Streamable = ReaderType> + Send + Sync + 'static,
ReaderType: AsyncRead + Unpin + Send + Sync,
Expand All @@ -43,7 +45,8 @@ where
}

#[async_trait]
impl<S, ReaderType> Search<S, ReaderType, ReferenceSequence, Index, AsyncReader<ReaderType>, Header>
impl<S, ReaderType>
Search<S, ReaderType, ReferenceSequence<BinnedIndex>, Index, AsyncReader<ReaderType>, Header>
for BcfSearch<S>
where
S: Storage<Streamable = ReaderType> + Send + Sync + 'static,
Expand Down
31 changes: 18 additions & 13 deletions htsget-search/src/htsget/search.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,11 @@ use async_trait::async_trait;
use futures::StreamExt;
use futures_util::stream::FuturesOrdered;
use noodles::bgzf::{gzi, VirtualPosition};
use noodles::csi::index::reference_sequence::bin::Chunk;
use noodles::csi::index::ReferenceSequence;
use noodles::csi::Index;
use noodles::csi::binning_index::index::reference_sequence::bin::Chunk;
use noodles::csi::binning_index::index::Index;
use noodles::csi::binning_index::index::{reference_sequence, ReferenceSequence};
use noodles::csi::binning_index::ReferenceSequence as ReferenceSequenceExt;
use noodles::csi::BinningIndex;
use tokio::io;
use tokio::io::{AsyncRead, BufReader};
use tokio::select;
Expand Down Expand Up @@ -373,22 +375,24 @@ where
/// BAM, BCF, and VCF.
///
/// [S] is the storage type.
/// [I] the index type used for the `ReferenceSequence`.
/// [ReaderType] is the inner type used for [Reader].
/// [ReferenceSequence] is the reference sequence type of the format's index.
/// [Index] is the format's index type.
/// [Reader] is the format's reader type.
/// [Header] is the format's header type.
#[async_trait]
pub trait BgzfSearch<S, ReaderType, Reader, Header>:
Search<S, ReaderType, ReferenceSequence, Index, Reader, Header>
pub trait BgzfSearch<S, I, ReaderType, Reader, Header>:
Search<S, ReaderType, ReferenceSequence<I>, Index<I>, Reader, Header>
where
S: Storage<Streamable = ReaderType> + Send + Sync + 'static,
I: reference_sequence::Index + Send + Sync,
ReaderType: AsyncRead + Unpin + Send + Sync,
Reader: Send + Sync,
Header: Send + Sync,
{
#[instrument(level = "trace", skip_all)]
fn index_positions(index: &Index) -> BTreeSet<u64> {
fn index_positions(index: &Index<I>) -> BTreeSet<u64> {
trace!("getting possible index positions");
let mut positions = BTreeSet::new();

Expand Down Expand Up @@ -425,7 +429,7 @@ where
&self,
query: &Query,
ref_seq_id: usize,
index: &Index,
index: &Index<I>,
) -> Result<Vec<BytesPosition>> {
let chunks: Result<Vec<Chunk>> = trace_span!("querying chunks").in_scope(|| {
trace!(id = ?query.id(), ref_seq_id = ?ref_seq_id, "querying chunks");
Expand Down Expand Up @@ -538,7 +542,7 @@ where
async fn get_byte_ranges_for_unmapped(
&self,
_query: &Query,
_index: &Index,
_index: &Index<I>,
) -> Result<Vec<BytesPosition>> {
Ok(Vec::new())
}
Expand All @@ -551,14 +555,15 @@ where
}

#[async_trait]
impl<S, ReaderType, Reader, Header, T>
SearchAll<S, ReaderType, ReferenceSequence, Index, Reader, Header> for T
impl<S, I, ReaderType, Reader, Header, T>
SearchAll<S, ReaderType, ReferenceSequence<I>, Index<I>, Reader, Header> for T
where
S: Storage<Streamable = ReaderType> + Send + Sync + 'static,
I: reference_sequence::Index + Send + Sync,
ReaderType: AsyncRead + Unpin + Send + Sync,
Reader: Send + Sync,
Header: Send + Sync,
T: BgzfSearch<S, ReaderType, Reader, Header> + Send + Sync,
T: BgzfSearch<S, I, ReaderType, Reader, Header> + Send + Sync,
{
#[instrument(level = "debug", skip(self), ret)]
async fn get_byte_ranges_for_all(&self, query: &Query) -> Result<Vec<BytesPosition>> {
Expand All @@ -568,7 +573,7 @@ where
}

#[instrument(level = "trace", skip_all, ret)]
async fn get_header_end_offset(&self, index: &Index) -> Result<u64> {
async fn get_header_end_offset(&self, index: &Index<I>) -> Result<u64> {
let first_index_position =
Self::index_positions(index)
.into_iter()
Expand All @@ -587,7 +592,7 @@ where

async fn get_byte_ranges_for_header(
&self,
index: &Index,
index: &Index<I>,
header: &Header,
reader: &mut Reader,
query: &Query,
Expand Down
12 changes: 8 additions & 4 deletions htsget-search/src/htsget/vcf_search.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,11 @@ use async_trait::async_trait;
use futures_util::stream::FuturesOrdered;
use noodles::bgzf;
use noodles::bgzf::VirtualPosition;
use noodles::csi::index::ReferenceSequence;
use noodles::csi::Index;
use noodles::csi::binning_index::index::reference_sequence::index::LinearIndex;
use noodles::csi::binning_index::index::ReferenceSequence;
use noodles::csi::BinningIndex;
use noodles::tabix;
use noodles::tabix::Index;
use noodles::vcf;
use noodles::vcf::Header;
use tokio::io;
Expand All @@ -30,7 +32,8 @@ pub struct VcfSearch<S> {
}

#[async_trait]
impl<S, ReaderType> BgzfSearch<S, ReaderType, AsyncReader<ReaderType>, Header> for VcfSearch<S>
impl<S, ReaderType> BgzfSearch<S, LinearIndex, ReaderType, AsyncReader<ReaderType>, Header>
for VcfSearch<S>
where
S: Storage<Streamable = ReaderType> + Send + Sync + 'static,
ReaderType: AsyncRead + Unpin + Send + Sync,
Expand All @@ -48,7 +51,8 @@ where
}

#[async_trait]
impl<S, ReaderType> Search<S, ReaderType, ReferenceSequence, Index, AsyncReader<ReaderType>, Header>
impl<S, ReaderType>
Search<S, ReaderType, ReferenceSequence<LinearIndex>, Index, AsyncReader<ReaderType>, Header>
for VcfSearch<S>
where
S: Storage<Streamable = ReaderType> + Send + Sync + 'static,
Expand Down
2 changes: 1 addition & 1 deletion htsget-test/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ default = []
# Server tests dependencies
htsget-config = { version = "0.8.0", path = "../htsget-config", default-features = false, optional = true }

noodles = { version = "0.50", optional = true, features = ["async", "bgzf", "vcf"] }
noodles = { version = "0.60", optional = true, features = ["async", "bgzf", "vcf"] }

reqwest = { version = "0.11", default-features = false, features = ["json", "rustls-tls"], optional = true }
tokio = { version = "1", features = ["rt-multi-thread"], optional = true }
Expand Down

0 comments on commit 442c195

Please sign in to comment.