Merge pull request #223 from avast/fix/gcs_null_metadata
fix: GCS backend - Correctly handle missing (null) Blob metadata
mi-char authored Dec 20, 2023
2 parents b000303 + 1b2f0c2 commit 1a5ce97
Showing 2 changed files with 11 additions and 8 deletions.
GcsStorageBackend.scala

@@ -5,7 +5,7 @@ import cats.effect.implicits.catsEffectSyntaxBracket
 import cats.effect.{Blocker, ContextShift, Resource, Sync}
 import cats.syntax.all._
 import com.avast.clients.storage.compression.ZstdDecompressOutputStream
-import com.avast.clients.storage.gcs.GcsStorageBackend.composeBlobPath
+import com.avast.clients.storage.gcs.GcsStorageBackend.{composeBlobPath, getMetadataValue, CompressionTypeHeader, OriginalSizeHeader}
 import com.avast.clients.storage.{ConfigurationException, GetResult, HeadResult, StorageBackend, StorageException}
 import com.avast.scala.hashes.Sha256
 import com.google.auth.oauth2.ServiceAccountCredentials
@@ -35,10 +35,10 @@ class GcsStorageBackend[F[_]: Sync: ContextShift](storageClient: Storage, bucket
       blob <- getBlob(sha256)
       result = blob match {
         case Some(blob) =>
-          blob.getMetadata.get(GcsStorageBackend.OriginalSizeHeader) match {
-            case null =>
+          getMetadataValue(blob, OriginalSizeHeader) match {
+            case None =>
               HeadResult.Exists(blob.getSize)
-            case originalSize =>
+            case Some(originalSize) =>
               HeadResult.Exists(originalSize.toLong)
           }
         case None =>
@@ -108,9 +108,6 @@ class GcsStorageBackend[F[_]: Sync: ContextShift](storageClient: Storage, bucket
   }

   private def downloadBlobToFile(blob: Blob, fileStream: OutputStream): F[(Long, Sha256)] = {
-    def getCompressionType: Option[String] = {
-      Option(blob.getMetadata.get(GcsStorageBackend.CompressionTypeHeader)).map(_.toLowerCase)
-    }

     Sync[F]
       .delay {
@@ -120,7 +117,7 @@ class GcsStorageBackend[F[_]: Sync: ContextShift](storageClient: Storage, bucket
       }
       .bracket {
         case (countingStream, hashingStream) => {
-          getCompressionType match {
+          getMetadataValue(blob, CompressionTypeHeader) match {
             case None =>
               downloadBlobToStream(blob, hashingStream)
             case Some("zstd") =>
@@ -196,6 +193,10 @@ object GcsStorageBackend {
     String.join("/", sha256Hex.substring(0, 2), sha256Hex.substring(2, 4), sha256Hex.substring(4, 6), sha256Hex)
   }

+  private[gcs] def getMetadataValue(blob: Blob, key: String): Option[String] = {
+    Option(blob.getMetadata).flatMap(m => Option(m.get(key)))
+  }
+
   private[gcs] class CountingOutputStream(target: OutputStream) extends OutputStream {
     private var count: Long = 0

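For illustration, here is a minimal standalone sketch of the pattern the new getMetadataValue helper relies on: wrapping both the possibly-null metadata map and the possibly-missing key in Option, so either case collapses to None instead of a NullPointerException. The sketch uses a plain java.util.Map as a stand-in for Blob.getMetadata, and the key strings are placeholders, not the repository's real OriginalSizeHeader/CompressionTypeHeader constants.

    import java.util.{HashMap => JHashMap, Map => JMap}

    object NullSafeMetadataSketch extends App {
      // Same shape as getMetadataValue, but against a raw map instead of a Blob.
      def lookup(metadata: JMap[String, String], key: String): Option[String] =
        Option(metadata).flatMap(m => Option(m.get(key)))

      // GCS returns null metadata for objects without custom metadata.
      val absentMetadata: JMap[String, String] = null
      val someMetadata: JMap[String, String] = new JHashMap[String, String]()
      someMetadata.put("placeholder-size-key", "1024")

      println(lookup(absentMetadata, "placeholder-size-key")) // None - null map no longer throws
      println(lookup(someMetadata, "missing-key"))             // None - key not present
      println(lookup(someMetadata, "placeholder-size-key"))    // Some(1024)
    }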
GcsStorageBackendTest.scala

@@ -32,6 +32,7 @@ class GcsStorageBackendTest extends FunSuite with ScalaFutures with MockitoSugar

     val blob = mock[Blob]
     when(blob.getSize).thenReturn(fileSize.toLong)
+    when(blob.getMetadata).thenReturn(null)

     val storageClient = mock[Storage]
     when(storageClient.get(any[BlobId]())).thenAnswer { call =>
@@ -100,6 +101,7 @@ class GcsStorageBackendTest extends FunSuite with ScalaFutures with MockitoSugar
     val bucketName = "bucket-tst"

     val blob = mock[Blob]
+    when(blob.getMetadata).thenReturn(null)
     when(blob.downloadTo(any[OutputStream]())).thenAnswer { call =>
       val outputStream = call.getArgument[OutputStream](0)
       outputStream.write(content.getBytes())
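A note on the test changes above: stubbing blob.getMetadata to return null models the missing-metadata case this fix targets, so the existing head and download tests now exercise the path that previously ended in a NullPointerException.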
