component_id_index/lib.rs

// Copyright 2020 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This library must remain platform-agnostic because it is used by a host tool and within Fuchsia.

use anyhow::Context;
use camino::{Utf8Path, Utf8PathBuf};
use clonable_error::ClonableError;
use fidl_fuchsia_component_internal as fcomponent_internal;
use moniker::Moniker;
use std::collections::{HashMap, HashSet};
use thiserror::Error;

#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};

pub mod fidl_convert;
mod instance_id;

pub use instance_id::{InstanceId, InstanceIdError};

/// Component ID index entry, only used for persistence to JSON5 and FIDL.
#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))]
#[derive(Debug, PartialEq, Eq, Clone)]
pub(crate) struct PersistedIndexEntry {
    pub instance_id: InstanceId,
    pub moniker: Moniker,
}

/// Component ID index, only used for persistence to JSON5 and FIDL.
///
/// Unlike [Index], this type is not validated, so it may contain duplicate monikers
/// and instance IDs.
#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))]
#[derive(Debug, PartialEq, Eq, Clone)]
pub(crate) struct PersistedIndex {
    instances: Vec<PersistedIndexEntry>,
}

/// An index that maps component monikers to instance IDs.
///
/// Unlike [PersistedIndex], this type is validated to contain only unique monikers
/// and instance IDs.
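///
/// # Example
///
/// A minimal usage sketch; the moniker and instance ID literals are arbitrary
/// values in the formats exercised by the tests below:
///
/// ```ignore
/// let mut index = Index::default();
/// let moniker: Moniker = "/a/b".parse().unwrap();
/// let id: InstanceId =
///     "fb94044d62278b37c221c7fdeebdcf1304262f3e11416f68befa5ef88b7a2163".parse().unwrap();
/// index.insert(moniker.clone(), id.clone()).unwrap();
/// assert_eq!(index.id_for_moniker(&moniker), Some(&id));
/// assert!(index.contains_id(&id));
/// ```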
#[cfg_attr(
    feature = "serde",
    derive(Deserialize, Serialize),
    serde(try_from = "PersistedIndex", into = "PersistedIndex")
)]
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Index {
    /// Map of a moniker from the index to its instance ID.
    moniker_to_instance_id: HashMap<Moniker, InstanceId>,

    /// All instance IDs, equivalent to the values of `moniker_to_instance_id`.
    instance_ids: HashSet<InstanceId>,
}

#[derive(Error, Clone, Debug)]
pub enum IndexError {
    #[error("failed to read index file '{path}'")]
    ReadFile {
        #[source]
        err: ClonableError,
        path: Utf8PathBuf,
    },
    #[error("invalid index")]
    ValidationError(#[from] ValidationError),
    #[error("could not merge indices")]
    MergeError(#[from] MergeError),
    #[error("could not convert FIDL index")]
    FidlConversionError(#[from] fidl_convert::FidlConversionError),
}

impl Index {
    /// Return an Index parsed from the FIDL file at `path`.
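    ///
    /// # Example
    ///
    /// A minimal sketch; the path below is a hypothetical location of a persisted
    /// index, not one defined by this crate:
    ///
    /// ```ignore
    /// let index = Index::from_fidl_file(Utf8Path::new("path/to/component_id_index.fidlbin"))?;
    /// ```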
    pub fn from_fidl_file(path: &Utf8Path) -> Result<Self, IndexError> {
        fn fidl_index_from_file(
            path: &Utf8Path,
        ) -> Result<fcomponent_internal::ComponentIdIndex, anyhow::Error> {
            let raw_content = std::fs::read(path).context("failed to read file")?;
            let fidl_index = fidl::unpersist::<fcomponent_internal::ComponentIdIndex>(&raw_content)
                .context("failed to unpersist FIDL")?;
            Ok(fidl_index)
        }
        let fidl_index = fidl_index_from_file(path)
            .map_err(|err| IndexError::ReadFile { err: err.into(), path: path.to_owned() })?;
        let index = Index::try_from(fidl_index)?;
        Ok(index)
    }

    /// Construct an Index by merging JSON5 source files.
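    ///
    /// # Example
    ///
    /// A minimal sketch, assuming `index.json5` is a hypothetical file using the
    /// JSON5 format exercised by the tests below:
    ///
    /// ```ignore
    /// // index.json5:
    /// // {
    /// //     instances: [
    /// //         {
    /// //             instance_id: "fb94044d62278b37c221c7fdeebdcf1304262f3e11416f68befa5ef88b7a2163",
    /// //             moniker: "/a/b"
    /// //         }
    /// //     ]
    /// // }
    /// let index = Index::merged_from_json5_files(&[Utf8PathBuf::from("index.json5")])?;
    /// ```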
    #[cfg(feature = "serde")]
    pub fn merged_from_json5_files(paths: &[Utf8PathBuf]) -> Result<Self, IndexError> {
        fn index_from_json5_file(path: &Utf8Path) -> Result<Index, anyhow::Error> {
            let mut file = std::fs::File::open(path).context("failed to open")?;
            let index: Index = serde_json5::from_reader(&mut file).context("failed to parse")?;
            Ok(index)
        }
        let mut ctx = MergeContext::default();
        for path in paths {
            let index = index_from_json5_file(path)
                .map_err(|err| IndexError::ReadFile { err: err.into(), path: path.to_owned() })?;
            ctx.merge(path, &index)?;
        }
        Ok(ctx.output())
    }

    /// Insert an entry into the index.
    ///
    /// Returns an error if the index already contains the moniker or the instance ID.
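    ///
    /// # Example
    ///
    /// A minimal sketch of the duplicate checks; the instance ID literal is an
    /// arbitrary value in the format exercised by the tests below:
    ///
    /// ```ignore
    /// let mut index = Index::default();
    /// let id: InstanceId =
    ///     "fb94044d62278b37c221c7fdeebdcf1304262f3e11416f68befa5ef88b7a2163".parse().unwrap();
    /// index.insert("/a/b".parse().unwrap(), id.clone()).unwrap();
    /// // Re-using the same instance ID under a different moniker is rejected.
    /// assert!(matches!(
    ///     index.insert("/c/d".parse().unwrap(), id),
    ///     Err(ValidationError::DuplicateId(_))
    /// ));
    /// ```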
    pub fn insert(
        &mut self,
        moniker: Moniker,
        instance_id: InstanceId,
    ) -> Result<(), ValidationError> {
        if !self.instance_ids.insert(instance_id.clone()) {
            return Err(ValidationError::DuplicateId(instance_id));
        }
        if self.moniker_to_instance_id.insert(moniker.clone(), instance_id).is_some() {
            return Err(ValidationError::DuplicateMoniker(moniker));
        }
        Ok(())
    }

    /// Returns the instance ID for the moniker, if the index contains the moniker.
    pub fn id_for_moniker(&self, moniker: &Moniker) -> Option<&InstanceId> {
        self.moniker_to_instance_id.get(moniker)
    }

    /// Returns true if the index contains the instance ID.
    pub fn contains_id(&self, id: &InstanceId) -> bool {
        self.instance_ids.contains(id)
    }
}

impl Default for Index {
    fn default() -> Self {
        Index { moniker_to_instance_id: HashMap::new(), instance_ids: HashSet::new() }
    }
}

impl TryFrom<PersistedIndex> for Index {
    type Error = ValidationError;

    fn try_from(value: PersistedIndex) -> Result<Self, Self::Error> {
        let mut index = Index::default();
        for entry in value.instances.into_iter() {
            index.insert(entry.moniker, entry.instance_id)?;
        }
        Ok(index)
    }
}

impl From<Index> for PersistedIndex {
    fn from(value: Index) -> Self {
        let mut instances = value
            .moniker_to_instance_id
            .into_iter()
            .map(|(moniker, instance_id)| PersistedIndexEntry { instance_id, moniker })
            .collect::<Vec<_>>();
        instances.sort_by(|a, b| a.moniker.cmp(&b.moniker));
        Self { instances }
    }
}

#[derive(Error, Debug, Clone, PartialEq)]
pub enum ValidationError {
    #[error("duplicate moniker: {}", .0)]
    DuplicateMoniker(Moniker),
    #[error("duplicate instance ID: {}", .0)]
    DuplicateId(InstanceId),
}

#[derive(Error, Debug, Clone, PartialEq)]
pub enum MergeError {
    #[error("Moniker '{}' must be unique but exists in the following index files:\n {}\n {}", .moniker, .source1, .source2)]
    DuplicateMoniker { moniker: Moniker, source1: Utf8PathBuf, source2: Utf8PathBuf },
    #[error("Instance ID '{}' must be unique but exists in the following index files:\n {}\n {}", .instance_id, .source1, .source2)]
    DuplicateId { instance_id: InstanceId, source1: Utf8PathBuf, source2: Utf8PathBuf },
}

/// A builder that merges indices into a single accumulated index.
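///
/// # Example
///
/// A minimal sketch of merging two indices, assuming `index_a` and `index_b` are
/// previously constructed [Index] values; the paths are arbitrary labels used only
/// for error reporting:
///
/// ```ignore
/// let mut ctx = MergeContext::default();
/// ctx.merge(Utf8Path::new("indices/a.json5"), &index_a)?;
/// ctx.merge(Utf8Path::new("indices/b.json5"), &index_b)?;
/// let merged: Index = ctx.output();
/// ```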
pub struct MergeContext {
    /// Index that contains entries accumulated from calls to [`merge()`].
    output_index: Index,
    // Path to the source index file that contains the moniker.
    moniker_to_source_path: HashMap<Moniker, Utf8PathBuf>,
    // Path to the source index file that contains the instance ID.
    instance_id_to_source_path: HashMap<InstanceId, Utf8PathBuf>,
}

impl MergeContext {
    // Merge `index` into the MergeContext.
    //
    // This method can be called multiple times to merge multiple indices.
    // The resulting index can be accessed with output().
    pub fn merge(&mut self, source_index_path: &Utf8Path, index: &Index) -> Result<(), MergeError> {
        for (moniker, instance_id) in &index.moniker_to_instance_id {
            self.output_index.insert(moniker.clone(), instance_id.clone()).map_err(
                |err| match err {
                    ValidationError::DuplicateMoniker(moniker) => {
                        let previous_source_path =
                            self.moniker_to_source_path.get(&moniker).cloned().unwrap_or_default();
                        MergeError::DuplicateMoniker {
                            moniker,
                            source1: previous_source_path,
                            source2: source_index_path.to_owned(),
                        }
                    }
                    ValidationError::DuplicateId(instance_id) => {
                        let previous_source_path = self
                            .instance_id_to_source_path
                            .get(&instance_id)
                            .cloned()
                            .unwrap_or_default();
                        MergeError::DuplicateId {
                            instance_id,
                            source1: previous_source_path,
                            source2: source_index_path.to_owned(),
                        }
                    }
                },
            )?;
            self.instance_id_to_source_path
                .insert(instance_id.clone(), source_index_path.to_owned());
            self.moniker_to_source_path.insert(moniker.clone(), source_index_path.to_owned());
        }
        Ok(())
    }

    // Return the accumulated index from calls to merge().
    pub fn output(self) -> Index {
        self.output_index
    }
}

impl Default for MergeContext {
    fn default() -> Self {
        MergeContext {
            output_index: Index::default(),
            instance_id_to_source_path: HashMap::new(),
            moniker_to_source_path: HashMap::new(),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use anyhow::Result;

    #[test]
    fn merge_empty_index() {
        let ctx = MergeContext::default();
        assert_eq!(ctx.output(), Index::default());
    }

    #[test]
    fn merge_single_index() -> Result<()> {
        let mut ctx = MergeContext::default();

        let mut index = Index::default();
        let moniker = vec!["foo"].try_into().unwrap();
        let instance_id = InstanceId::new_random(&mut rand::thread_rng());
        index.insert(moniker, instance_id).unwrap();

        ctx.merge(Utf8Path::new("/random/file/path"), &index)?;
        assert_eq!(ctx.output(), index.clone());
        Ok(())
    }

    #[test]
    fn merge_duplicate_id() -> Result<()> {
        let source1 = Utf8Path::new("/a/b/c");
        let source2 = Utf8Path::new("/d/e/f");

        let id = InstanceId::new_random(&mut rand::thread_rng());
        let index1 = {
            let mut index = Index::default();
            let moniker = vec!["foo"].try_into().unwrap();
            index.insert(moniker, id.clone()).unwrap();
            index
        };
        let index2 = {
            let mut index = Index::default();
            let moniker = vec!["bar"].try_into().unwrap();
            index.insert(moniker, id.clone()).unwrap();
            index
        };

        let mut ctx = MergeContext::default();
        ctx.merge(source1, &index1)?;

        let err = ctx.merge(source2, &index2).unwrap_err();
        assert_eq!(
            err,
            MergeError::DuplicateId {
                instance_id: id,
                source1: source1.to_owned(),
                source2: source2.to_owned()
            }
        );

        Ok(())
    }

    #[test]
    fn merge_duplicate_moniker() -> Result<()> {
        let source1 = Utf8Path::new("/a/b/c");
        let source2 = Utf8Path::new("/d/e/f");

        let moniker: Moniker = vec!["foo"].try_into().unwrap();
        let index1 = {
            let mut index = Index::default();
            let id = InstanceId::new_random(&mut rand::thread_rng());
            index.insert(moniker.clone(), id).unwrap();
            index
        };
        let index2 = {
            let mut index = Index::default();
            let id = InstanceId::new_random(&mut rand::thread_rng());
            index.insert(moniker.clone(), id).unwrap();
            index
        };

        let mut ctx = MergeContext::default();
        ctx.merge(source1, &index1)?;

        let err = ctx.merge(source2, &index2).unwrap_err();
        assert_eq!(
            err,
            MergeError::DuplicateMoniker {
                moniker,
                source1: source1.to_owned(),
                source2: source2.to_owned()
            }
        );

        Ok(())
    }

    #[cfg(feature = "serde")]
    #[test]
    fn merged_from_json5_files() {
        use std::io::Write;

        let mut index_file_1 = tempfile::NamedTempFile::new().unwrap();
        index_file_1
            .write_all(
                r#"{
            // Here is a comment.
            instances: [
                {
                    instance_id: "fb94044d62278b37c221c7fdeebdcf1304262f3e11416f68befa5ef88b7a2163",
                    moniker: "/a/b"
                }
            ]
        }"#
                .as_bytes(),
            )
            .unwrap();

        let mut index_file_2 = tempfile::NamedTempFile::new().unwrap();
        index_file_2
            .write_all(
                r#"{
            // Here is a comment.
            instances: [
                {
                    instance_id: "4f915af6c4b682867ab7ad2dc9cbca18342ddd9eec61724f19d231cf6d07f122",
                    moniker: "/c/d"
                }
            ]
        }"#
                .as_bytes(),
            )
            .unwrap();

        let expected_index = {
            let mut index = Index::default();
            index
                .insert(
                    "/a/b".parse::<Moniker>().unwrap(),
                    "fb94044d62278b37c221c7fdeebdcf1304262f3e11416f68befa5ef88b7a2163"
                        .parse::<InstanceId>()
                        .unwrap(),
                )
                .unwrap();
            index
                .insert(
                    "/c/d".parse::<Moniker>().unwrap(),
                    "4f915af6c4b682867ab7ad2dc9cbca18342ddd9eec61724f19d231cf6d07f122"
                        .parse::<InstanceId>()
                        .unwrap(),
                )
                .unwrap();
            index
        };

        // Check that the files parse and merge into the expected index.
        let files = [
            Utf8PathBuf::from_path_buf(index_file_1.path().to_path_buf()).unwrap(),
            Utf8PathBuf::from_path_buf(index_file_2.path().to_path_buf()).unwrap(),
        ];
        assert_eq!(expected_index, Index::merged_from_json5_files(&files).unwrap());
    }

    #[cfg(feature = "serde")]
    #[test]
    fn serialize_deserialize() -> Result<()> {
        let expected_index = {
            let mut index = Index::default();
            for i in 0..5 {
                let moniker: Moniker = vec![i.to_string().as_str()].try_into().unwrap();
                let instance_id = InstanceId::new_random(&mut rand::thread_rng());
                index.insert(moniker, instance_id).unwrap();
            }
            index
        };

        let json_index = serde_json5::to_string(&expected_index)?;
        let actual_index = serde_json5::from_str(&json_index)?;
        assert_eq!(expected_index, actual_index);

        Ok(())
    }
}