Restore #6645 backwards compat
jgallagher committed Dec 4, 2024
1 parent e807ea9 commit 02518cf
Showing 4 changed files with 165 additions and 41 deletions.
48 changes: 40 additions & 8 deletions nexus/reconfigurator/planning/src/blueprint_builder/builder.rs
@@ -5,6 +5,7 @@
//! Low-level facility for generating Blueprints
use crate::blueprint_editor::EditedSled;
use crate::blueprint_editor::PreexistingDatasetIds;
use crate::blueprint_editor::SledEditError;
use crate::blueprint_editor::SledEditor;
use crate::ip_allocator::IpAllocator;
@@ -38,6 +39,7 @@ use nexus_types::deployment::OmicronZoneExternalSnatIp;
use nexus_types::deployment::PlanningInput;
use nexus_types::deployment::SledDetails;
use nexus_types::deployment::SledFilter;
use nexus_types::deployment::SledLookupErrorKind;
use nexus_types::deployment::SledResources;
use nexus_types::deployment::ZpoolFilter;
use nexus_types::deployment::ZpoolName;
@@ -469,6 +471,30 @@ impl<'a> BlueprintBuilder<'a> {
"parent_id" => parent_blueprint.id.to_string(),
));

// Helper to build a `PreexistingDatasetIds` for a given sled. This will
// go away with https://github.com/oxidecomputer/omicron/issues/6645.
let build_preexisting_dataset_ids = |sled_id| -> anyhow::Result<
PreexistingDatasetIds,
> {
match input.sled_lookup(SledFilter::All, sled_id) {
Ok(details) => PreexistingDatasetIds::build(&details.resources)
.with_context(|| {
format!(
"failed building map of preexisting \
dataset IDs for sled {sled_id}"
)
}),
Err(err) => match err.kind() {
SledLookupErrorKind::Missing => {
Ok(PreexistingDatasetIds::empty())
}
SledLookupErrorKind::Filtered { .. } => unreachable!(
"SledFilter::All should not filter anything out"
),
},
}
};

// Squish the disparate maps in our parent blueprint into one map of
// `SledEditor`s.
let mut sled_editors = BTreeMap::new();
@@ -508,21 +534,27 @@ impl<'a> BlueprintBuilder<'a> {
generation: Generation::new(),
datasets: BTreeMap::new(),
});
let editor =
SledEditor::new(state, zones.clone(), disks, datasets.clone())
.with_context(|| {
format!(
"failed to construct SledEditor for sled {sled_id}"
)
})?;
let editor = SledEditor::new(
state,
zones.clone(),
disks,
datasets.clone(),
build_preexisting_dataset_ids(*sled_id)?,
)
.with_context(|| {
format!("failed to construct SledEditor for sled {sled_id}")
})?;
sled_editors.insert(*sled_id, editor);
}

// Add new, empty `SledEditor`s for any commissioned sleds in our input
// that weren't in the parent blueprint. (These are newly-added sleds.)
for sled_id in input.all_sled_ids(SledFilter::Commissioned) {
if let Entry::Vacant(slot) = sled_editors.entry(sled_id) {
slot.insert(SledEditor::new_empty(SledState::Active));
slot.insert(SledEditor::new_empty(
SledState::Active,
build_preexisting_dataset_ids(sled_id)?,
));
}
}

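The helper closure above returns an empty set of preexisting dataset IDs for a sled the planning input does not know about, and treats a filtered-out sled as impossible because `SledFilter::All` admits every sled. A minimal sketch of that lookup-and-fallback shape, using hypothetical simplified types rather than the real planning-input API:

// Hypothetical, simplified stand-ins -- not the real omicron planning-input
// types -- illustrating the "missing sled contributes no IDs" fallback.
use std::collections::BTreeMap;

#[derive(Debug)]
enum SledLookupError {
    Missing,
    Filtered,
}

#[derive(Debug, Clone, Default, PartialEq)]
struct PreexistingIds(BTreeMap<&'static str, u64>);

// Stand-in for `input.sled_lookup(SledFilter::All, sled_id)`.
fn sled_lookup(
    input: &BTreeMap<u32, PreexistingIds>,
    sled_id: u32,
) -> Result<&PreexistingIds, SledLookupError> {
    input.get(&sled_id).ok_or(SledLookupError::Missing)
}

fn preexisting_ids_for_sled(
    input: &BTreeMap<u32, PreexistingIds>,
    sled_id: u32,
) -> PreexistingIds {
    match sled_lookup(input, sled_id) {
        Ok(ids) => ids.clone(),
        // A sled the input has never heard of contributes no preexisting IDs;
        // its datasets will get freshly generated IDs later.
        Err(SledLookupError::Missing) => PreexistingIds::default(),
        // With a filter that admits every sled, this arm cannot be reached.
        Err(SledLookupError::Filtered) => {
            unreachable!("an all-inclusive filter should not filter out sleds")
        }
    }
}

fn main() {
    let known = PreexistingIds(BTreeMap::from([("crucible", 42)]));
    let input = BTreeMap::from([(7u32, known.clone())]);
    assert_eq!(preexisting_ids_for_sled(&input, 7), known);
    assert_eq!(preexisting_ids_for_sled(&input, 99), PreexistingIds::default());
}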
1 change: 1 addition & 0 deletions nexus/reconfigurator/planning/src/blueprint_editor.rs
@@ -11,3 +11,4 @@ mod sled_editor;
pub(crate) use sled_editor::EditedSled;
pub(crate) use sled_editor::SledEditor;
pub(crate) use sled_editor::SledEditError;
pub(crate) use sled_editor::PreexistingDatasetIds;
@@ -25,6 +25,8 @@ mod datasets;
mod disks;
mod zones;

pub(crate) use self::datasets::PreexistingDatasetIds;

pub use self::datasets::DatasetsEditError;
pub use self::datasets::MultipleDatasetsOfKind;
pub use self::disks::DisksEditError;
@@ -95,21 +97,25 @@ impl SledEditor {
zones: BlueprintZonesConfig,
disks: BlueprintPhysicalDisksConfig,
datasets: BlueprintDatasetsConfig,
preexisting_dataset_ids: PreexistingDatasetIds,
) -> Result<Self, SledInputError> {
Ok(Self {
state,
zones: zones.try_into()?,
disks: disks.try_into()?,
datasets: datasets.try_into()?,
datasets: DatasetsEditor::new(datasets, preexisting_dataset_ids)?,
})
}

pub fn new_empty(state: SledState) -> Self {
pub fn new_empty(
state: SledState,
preexisting_dataset_ids: PreexistingDatasetIds,
) -> Self {
Self {
state,
zones: ZonesEditor::empty(),
disks: DisksEditor::empty(),
datasets: DatasetsEditor::empty(),
datasets: DatasetsEditor::empty(preexisting_dataset_ids),
}
}

@@ -8,6 +8,8 @@ use illumos_utils::zpool::ZpoolName;
use nexus_types::deployment::BlueprintDatasetConfig;
use nexus_types::deployment::BlueprintDatasetDisposition;
use nexus_types::deployment::BlueprintDatasetsConfig;
use nexus_types::deployment::SledResources;
use nexus_types::deployment::ZpoolFilter;
use omicron_common::api::external::ByteCount;
use omicron_common::api::external::Generation;
use omicron_common::disk::CompressionAlgorithm;
@@ -38,6 +40,73 @@ pub enum DatasetsEditError {
ExpungeNonexistentDataset { id: DatasetUuid },
}

/// TODO(https://github.com/oxidecomputer/omicron/issues/6645): In between
/// the addition of datasets to blueprints and knowing all deployed systems
/// have _generated_ a blueprint that populates datasets, we are in a sticky
/// situation where a dataset might have already existed in CRDB with an ID,
/// but the blueprint system doesn't know about it. We accept a map of all
/// existing dataset IDs, and then when determining the ID of a dataset,
/// we'll try these in order:
///
/// 1. Is the dataset in our blueprint already? If so, use its ID.
/// 2. Is the dataset in `preexisting_database_ids`? If so, use that ID.
/// 3. Generate a new random ID.
#[derive(Debug)]
pub(crate) struct PreexistingDatasetIds(
BTreeMap<ZpoolUuid, BTreeMap<DatasetKind, DatasetUuid>>,
);

impl PreexistingDatasetIds {
pub fn build(
resources: &SledResources,
) -> Result<Self, MultipleDatasetsOfKind> {
let iter = resources.all_datasets(ZpoolFilter::InService).flat_map(
|(&zpool_id, configs)| {
configs.iter().map(move |config| {
(zpool_id, config.name.dataset().clone(), config.id)
})
},
);

let mut kind_id_map: BTreeMap<
ZpoolUuid,
BTreeMap<DatasetKind, DatasetUuid>,
> = BTreeMap::new();

for (zpool_id, kind, dataset_id) in iter {
let dataset_ids_by_kind = kind_id_map.entry(zpool_id).or_default();
match dataset_ids_by_kind.entry(kind) {
Entry::Vacant(slot) => {
slot.insert(dataset_id);
}
Entry::Occupied(prev) => {
return Err(MultipleDatasetsOfKind {
zpool_id,
kind: prev.key().clone(),
id1: *prev.get(),
id2: dataset_id,
});
}
}
}
Ok(Self(kind_id_map))
}

pub fn empty() -> Self {
Self(BTreeMap::new())
}
}

impl PreexistingDatasetIds {
fn get(
&self,
zpool_id: &ZpoolUuid,
kind: &DatasetKind,
) -> Option<DatasetUuid> {
self.0.get(zpool_id).and_then(|by_kind| by_kind.get(kind).copied())
}
}

#[derive(Debug)]
pub(crate) struct PartialDatasetConfig {
pub name: DatasetName,
@@ -133,14 +202,46 @@ impl PartialDatasetConfig {

#[derive(Debug)]
pub(super) struct DatasetsEditor {
preexisting_dataset_ids: PreexistingDatasetIds,
config: BlueprintDatasetsConfig,
by_zpool_and_kind: BTreeMap<ZpoolUuid, BTreeMap<DatasetKind, DatasetUuid>>,
counts: EditCounts,
}

impl DatasetsEditor {
pub fn empty() -> Self {
pub fn new(
config: BlueprintDatasetsConfig,
preexisting_dataset_ids: PreexistingDatasetIds,
) -> Result<Self, MultipleDatasetsOfKind> {
let mut by_zpool_and_kind = BTreeMap::new();
for dataset in config.datasets.values() {
let by_kind: &mut BTreeMap<_, _> =
by_zpool_and_kind.entry(dataset.pool.id()).or_default();
match by_kind.entry(dataset.kind.clone()) {
Entry::Vacant(slot) => {
slot.insert(dataset.id);
}
Entry::Occupied(prev) => {
return Err(MultipleDatasetsOfKind {
zpool_id: dataset.pool.id(),
kind: dataset.kind.clone(),
id1: *prev.get(),
id2: dataset.id,
});
}
}
}
Ok(Self {
preexisting_dataset_ids,
config,
by_zpool_and_kind,
counts: EditCounts::zeroes(),
})
}

pub fn empty(preexisting_dataset_ids: PreexistingDatasetIds) -> Self {
Self {
preexisting_dataset_ids,
config: BlueprintDatasetsConfig {
generation: Generation::new(),
datasets: BTreeMap::new(),
@@ -169,9 +270,19 @@
zpool: &ZpoolUuid,
kind: &DatasetKind,
) -> Option<DatasetUuid> {
let by_kind = self.by_zpool_and_kind.get(zpool)?;
let id = by_kind.get(kind).copied()?;
Some(id)
if let Some(blueprint_id) = self
.by_zpool_and_kind
.get(zpool)
.and_then(|by_kind| by_kind.get(kind).copied())
{
return Some(blueprint_id);
};
if let Some(preexisting_database_id) =
self.preexisting_dataset_ids.get(zpool, kind)
{
return Some(preexisting_database_id);
};
None
}

pub fn expunge(
Expand Down Expand Up @@ -231,29 +342,3 @@ impl DatasetsEditor {
}
}
}

impl TryFrom<BlueprintDatasetsConfig> for DatasetsEditor {
type Error = MultipleDatasetsOfKind;

fn try_from(config: BlueprintDatasetsConfig) -> Result<Self, Self::Error> {
let mut by_zpool_and_kind = BTreeMap::new();
for dataset in config.datasets.values() {
let by_kind: &mut BTreeMap<_, _> =
by_zpool_and_kind.entry(dataset.pool.id()).or_default();
match by_kind.entry(dataset.kind.clone()) {
Entry::Vacant(slot) => {
slot.insert(dataset.id);
}
Entry::Occupied(prev) => {
return Err(MultipleDatasetsOfKind {
zpool_id: dataset.pool.id(),
kind: dataset.kind.clone(),
id1: *prev.get(),
id2: dataset.id,
});
}
}
}
Ok(Self { config, by_zpool_and_kind, counts: EditCounts::zeroes() })
}
}
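The resolution order spelled out in the `PreexistingDatasetIds` doc comment earlier in this file (an ID already in the blueprint wins, then a preexisting database ID, then a freshly generated one) can be illustrated with a small standalone sketch; the integer IDs and string kinds below are hypothetical stand-ins for the real UUID and dataset-kind types, not the omicron API:

// A minimal sketch of the three-step dataset ID resolution described above.
// The plain integer/string types here are hypothetical placeholders.
use std::collections::BTreeMap;

type ZpoolId = u32;
type DatasetKind = &'static str;
type DatasetId = u64;

struct DatasetIdResolver {
    // IDs already recorded in the blueprint being edited (step 1).
    blueprint_ids: BTreeMap<(ZpoolId, DatasetKind), DatasetId>,
    // IDs that already exist in the database but not in any blueprint (step 2).
    preexisting_ids: BTreeMap<(ZpoolId, DatasetKind), DatasetId>,
    // Counter standing in for random UUID generation (step 3).
    next_fresh_id: DatasetId,
}

impl DatasetIdResolver {
    fn resolve(&mut self, zpool: ZpoolId, kind: DatasetKind) -> DatasetId {
        // 1. Prefer an ID the blueprint already knows about.
        if let Some(&id) = self.blueprint_ids.get(&(zpool, kind)) {
            return id;
        }
        // 2. Otherwise reuse a preexisting database ID, if one exists.
        if let Some(&id) = self.preexisting_ids.get(&(zpool, kind)) {
            return id;
        }
        // 3. Finally, mint a brand-new ID.
        let id = self.next_fresh_id;
        self.next_fresh_id += 1;
        id
    }
}

fn main() {
    let mut resolver = DatasetIdResolver {
        blueprint_ids: BTreeMap::from([((1, "crucible"), 100)]),
        preexisting_ids: BTreeMap::from([((1, "zone_root"), 200)]),
        next_fresh_id: 300,
    };
    assert_eq!(resolver.resolve(1, "crucible"), 100); // step 1: blueprint wins
    assert_eq!(resolver.resolve(1, "zone_root"), 200); // step 2: database fallback
    assert_eq!(resolver.resolve(2, "debug"), 300); // step 3: fresh ID
}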
