Skip to content
This repository has been archived by the owner on Jan 15, 2025. It is now read-only.

Switch to using new skopeo proxy code #118

Merged
merged 1 commit into from
Oct 14, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 13 additions & 6 deletions ci/installdeps.sh
Original file line number Diff line number Diff line change
@@ -1,10 +1,17 @@
#!/bin/bash
# Install CI build dependencies.
# Fail fast, treat unset variables as errors, and trace commands.
set -xeuo pipefail

# Always pull ostree from updates-testing to avoid the bodhi wait
dnf -y --enablerepo=updates-testing update ostree-devel

# Pull the code from https://github.com/containers/skopeo/pull/1476
# if necessary: build skopeo from git when the installed one lacks
# the experimental-image-proxy subcommand.
if ! skopeo experimental-image-proxy --help &>/dev/null; then
    # dnf-utils provides `dnf builddep`
    dnf -y install dnf-utils
    dnf builddep -y skopeo
    git clone --depth=1 https://github.com/containers/skopeo
    cd skopeo
    make
    # Overwrite the packaged binary with the freshly built one
    install -m 0755 bin/skopeo /usr/bin/
fi

git clone --depth=1 https://github.com/cgwalters/container-image-proxy
cd container-image-proxy
make
install -m 0755 bin/container-image-proxy /usr/bin/
2 changes: 1 addition & 1 deletion lib/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ version = "0.4.0-alpha.0"

[dependencies]
anyhow = "1.0"
containers-image-proxy = { version = "0.1", git = "https://github.com/cgwalters/containers-image-proxy-rs" }
async-compression = { version = "0.3", features = ["gzip", "tokio"] }
bytes = "1.0.1"
bitflags = "1"
Expand All @@ -20,7 +21,6 @@ fn-error-context = "0.2.0"
futures-util = "0.3.13"
gvariant = "0.4.0"
hex = "0.4.3"
hyper = { version = "0.14", features = ["full"] }
indicatif = "0.16.0"
lazy_static = "1.4.0"
libc = "0.2.92"
Expand Down
157 changes: 0 additions & 157 deletions lib/src/container/imageproxy.rs

This file was deleted.

60 changes: 51 additions & 9 deletions lib/src/container/import.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,11 @@

use super::*;
use anyhow::{anyhow, Context};
use containers_image_proxy::{ImageProxy, OpenedImage};
use containers_image_proxy::{OCI_TYPE_LAYER_GZIP, OCI_TYPE_LAYER_TAR};
use fn_error_context::context;
use tokio::io::AsyncRead;
use futures_util::Future;
use tokio::io::{AsyncBufRead, AsyncRead};
use tracing::{event, instrument, Level};

/// The result of an import operation
Expand Down Expand Up @@ -84,8 +87,10 @@ impl<T: AsyncRead> AsyncRead for ProgressReader<T> {
/// Download the manifest for a target image and its sha256 digest.
#[context("Fetching manifest")]
pub async fn fetch_manifest(imgref: &OstreeImageReference) -> Result<(Vec<u8>, String)> {
    // Spawn a proxy instance and open the target image by reference.
    let proxy = ImageProxy::new().await?;
    let img = proxy.open_image(&imgref.imgref.to_string()).await?;
    // Fetch the raw manifest bytes along with their content digest.
    let (digest, manifest_bytes) = proxy.fetch_manifest(&img).await?;
    // Release the proxy's handle on the image before returning.
    proxy.close_image(&img).await?;
    Ok((manifest_bytes, digest))
}

Expand Down Expand Up @@ -135,6 +140,36 @@ pub async fn import(
})
}

/// Create a decompressor for this MIME type, given a stream of input.
fn new_async_decompressor<'a>(
media_type: &str,
src: impl AsyncBufRead + Send + Unpin + 'a,
) -> Result<Box<dyn AsyncBufRead + Send + Unpin + 'a>> {
match media_type {
OCI_TYPE_LAYER_GZIP => Ok(Box::new(tokio::io::BufReader::new(
async_compression::tokio::bufread::GzipDecoder::new(src),
))),
OCI_TYPE_LAYER_TAR => Ok(Box::new(src)),
o => Err(anyhow::anyhow!("Unhandled layer type: {}", o)),
}
}

/// A wrapper for [`get_blob`] which fetches a layer and decompresses it.
///
/// Returns the decompressed reader plus a driver future that must be
/// polled to completion (e.g. via `tokio::join!`) alongside consuming
/// the reader.
pub(crate) async fn fetch_layer_decompress<'a>(
    proxy: &'a ImageProxy,
    img: &OpenedImage,
    layer: &oci::ManifestLayer,
) -> Result<(
    Box<dyn AsyncBufRead + Send + Unpin>,
    impl Future<Output = Result<()>> + 'a,
)> {
    let digest = layer.digest.as_str();
    // Start the blob fetch; `driver` pumps the underlying proxy I/O.
    let (raw_blob, driver) = proxy.get_blob(img, digest, layer.size).await?;
    // Layer media type decides whether we need gzip decompression.
    let decompressed = new_async_decompressor(&layer.media_type, raw_blob)?;
    Ok((decompressed, driver))
}

/// Fetch a container image using an in-memory manifest and import its embedded OSTree commit.
#[context("Importing {}", imgref)]
#[instrument(skip(repo, options, manifest_bytes))]
Expand All @@ -152,9 +187,15 @@ pub async fn import_from_manifest(
let options = options.unwrap_or_default();
let manifest: oci::Manifest = serde_json::from_slice(manifest_bytes)?;
let layer = require_one_layer_blob(&manifest)?;
event!(Level::DEBUG, "target blob: {}", layer.digest.as_str());
let mut proxy = imageproxy::ImageProxy::new(&imgref.imgref).await?;
let blob = proxy.fetch_layer_decompress(layer).await?;
event!(
Level::DEBUG,
"target blob digest:{} size: {}",
layer.digest.as_str(),
layer.size
);
let proxy = ImageProxy::new().await?;
let oi = &proxy.open_image(&imgref.imgref.to_string()).await?;
let (blob, driver) = fetch_layer_decompress(&proxy, oi, layer).await?;
let blob = ProgressReader {
reader: blob,
progress: options.progress,
Expand All @@ -164,9 +205,10 @@ pub async fn import_from_manifest(
SignatureSource::OstreeRemote(remote) => taropts.remote = Some(remote.clone()),
SignatureSource::ContainerPolicy | SignatureSource::ContainerPolicyAllowInsecure => {}
}
let ostree_commit = crate::tar::import_tar(repo, blob, Some(taropts))
.await
.with_context(|| format!("Parsing blob {}", layer.digest))?;
let import = crate::tar::import_tar(repo, blob, Some(taropts));
let (import, driver) = tokio::join!(import, driver);
driver?;
let ostree_commit = import.with_context(|| format!("Parsing blob {}", layer.digest))?;
// FIXME write ostree commit after proxy finalization
proxy.finalize().await?;
event!(Level::DEBUG, "created commit {}", ostree_commit);
Expand Down
1 change: 0 additions & 1 deletion lib/src/container/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -228,7 +228,6 @@ mod export;
pub use export::*;
mod import;
pub use import::*;
mod imageproxy;
mod oci;
mod skopeo;
pub mod store;
Expand Down
22 changes: 1 addition & 21 deletions lib/src/container/oci.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
//! oriented towards generating images.

use anyhow::{anyhow, Result};
use containers_image_proxy::OCI_TYPE_LAYER_GZIP;
use flate2::write::GzEncoder;
use fn_error_context::context;
use openat_ext::*;
Expand All @@ -10,7 +11,6 @@ use phf::phf_map;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::io::prelude::*;
use tokio::io::AsyncBufRead;

/// Map the value from `uname -m` to the Go architecture.
/// TODO find a more canonical home for this.
Expand All @@ -22,10 +22,6 @@ static MACHINE_TO_OCI: phf::Map<&str, &str> = phf_map! {
// OCI types, see https://github.com/opencontainers/image-spec/blob/master/media-types.md
pub(crate) const OCI_TYPE_CONFIG_JSON: &str = "application/vnd.oci.image.config.v1+json";
pub(crate) const OCI_TYPE_MANIFEST_JSON: &str = "application/vnd.oci.image.manifest.v1+json";
pub(crate) const OCI_TYPE_LAYER_GZIP: &str = "application/vnd.oci.image.layer.v1.tar+gzip";
pub(crate) const OCI_TYPE_LAYER_TAR: &str = "application/vnd.oci.image.layer.v1.tar";
// FIXME - use containers/image to fully convert the manifest to OCI
const DOCKER_TYPE_LAYER_TARGZ: &str = "application/vnd.docker.image.rootfs.diff.tar.gzip";

/// Path inside an OCI directory to the blobs
const BLOBDIR: &str = "blobs/sha256";
Expand Down Expand Up @@ -68,22 +64,6 @@ pub(crate) struct ManifestLayer {
pub size: u64,
}

impl ManifestLayer {
/// Create a decompressor for this layer, given a stream of input.
pub fn new_async_decompressor(
&self,
src: impl AsyncBufRead + Send + Unpin + 'static,
) -> Result<Box<dyn AsyncBufRead + Send + Unpin + 'static>> {
match self.media_type.as_str() {
OCI_TYPE_LAYER_GZIP | DOCKER_TYPE_LAYER_TARGZ => Ok(Box::new(
tokio::io::BufReader::new(async_compression::tokio::bufread::GzipDecoder::new(src)),
)),
OCI_TYPE_LAYER_TAR => Ok(Box::new(src)),
o => Err(anyhow::anyhow!("Unhandled layer type: {}", o)),
}
}
}

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct Manifest {
Expand Down
Loading