1use anyhow::Context;
8use camino::{Utf8Path, Utf8PathBuf};
9use clonable_error::ClonableError;
10use fidl_fuchsia_component_internal as fcomponent_internal;
11use moniker::Moniker;
12use std::collections::{HashMap, HashSet};
13use thiserror::Error;
14
15#[cfg(feature = "serde")]
16use serde::{Deserialize, Serialize};
17
18pub mod fidl_convert;
19mod instance_id;
20
21pub use instance_id::{InstanceId, InstanceIdError};
22
/// One entry of the component ID index: a stable instance ID assigned to a
/// component instance identified by its moniker.
#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))]
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct IndexEntry {
    /// The stable instance ID assigned to this component instance.
    pub instance_id: InstanceId,
    /// The moniker identifying the component instance.
    pub moniker: Moniker,
    /// When true, `Index::insert` does not reject this entry if its
    /// `instance_id` duplicates one already present in the index.
    /// Defaults to `false` when absent from serialized input.
    #[cfg_attr(feature = "serde", serde(default))]
    pub ignore_duplicate_id: bool,
}
32
/// The serialized (persisted) form of `Index`: just the flat list of entries,
/// without the in-memory lookup sets that `Index` maintains.
#[cfg_attr(feature = "serde", derive(Deserialize, Serialize))]
#[derive(Debug, PartialEq, Eq, Clone)]
pub(crate) struct PersistedIndex {
    instances: Vec<IndexEntry>,
}
42
/// An index of component instance IDs keyed by moniker.
///
/// (De)serialization goes through `PersistedIndex` (see the `serde(try_from`,
/// `into)` attributes), which re-validates uniqueness on deserialization.
#[cfg_attr(
    feature = "serde",
    derive(Deserialize, Serialize),
    serde(try_from = "PersistedIndex", into = "PersistedIndex")
)]
#[derive(Debug, Default, PartialEq, Eq, Clone)]
pub struct Index {
    /// All entries, in insertion order.
    instances: Vec<IndexEntry>,

    /// Instance IDs seen so far; used by `insert` to reject duplicates and by
    /// `contains_id` for O(1) lookup.
    /// NOTE(review): entries inserted with `ignore_duplicate_id = true` are
    /// never added to this set, so `contains_id` does not see them — confirm
    /// whether that is intended.
    instance_ids: HashSet<InstanceId>,

    /// Monikers seen so far; used by `insert` to reject duplicate monikers.
    monikers: HashSet<Moniker>,
}
62
/// Errors surfaced when reading, validating, merging, or converting an index.
#[derive(Error, Clone, Debug)]
pub enum IndexError {
    /// The index file at `path` could not be read or parsed.
    #[error("failed to read index file '{path}'")]
    ReadFile {
        #[source]
        err: ClonableError,
        path: Utf8PathBuf,
    },
    /// The index contents violated a uniqueness invariant.
    #[error("invalid index")]
    ValidationError(#[from] ValidationError),
    /// Two or more indices could not be merged due to conflicting entries.
    #[error("could not merge indices")]
    MergeError(#[from] MergeError),
    /// A FIDL representation of the index could not be converted.
    #[error("could not convert FIDL index")]
    FidlConversionError(#[from] fidl_convert::FidlConversionError),
}
78
79impl Index {
80 pub fn from_fidl_file(path: &Utf8Path) -> Result<Self, IndexError> {
82 fn fidl_index_from_file(
83 path: &Utf8Path,
84 ) -> Result<fcomponent_internal::ComponentIdIndex, anyhow::Error> {
85 let raw_content = std::fs::read(path).context("failed to read file")?;
86 let fidl_index = fidl::unpersist::<fcomponent_internal::ComponentIdIndex>(&raw_content)
87 .context("failed to unpersist FIDL")?;
88 Ok(fidl_index)
89 }
90 let fidl_index = fidl_index_from_file(path)
91 .map_err(|err| IndexError::ReadFile { err: err.into(), path: path.to_owned() })?;
92 let index = Index::try_from(fidl_index)?;
93 Ok(index)
94 }
95
96 #[cfg(feature = "serde")]
98 pub fn merged_from_json5_files(paths: &[Utf8PathBuf]) -> Result<Self, IndexError> {
99 fn index_from_json5_file(path: &Utf8Path) -> Result<Index, anyhow::Error> {
100 let mut file = std::fs::File::open(&path).context("failed to open")?;
101 let index: Index = serde_json5::from_reader(&mut file).context("failed to parse")?;
102 Ok(index)
103 }
104 let mut ctx = MergeContext::default();
105 for path in paths {
106 let index = index_from_json5_file(path)
107 .map_err(|err| IndexError::ReadFile { err: err.into(), path: path.to_owned() })?;
108 ctx.merge(path, &index)?;
109 }
110 Ok(ctx.output())
111 }
112
113 pub fn insert(&mut self, entry: IndexEntry) -> Result<(), ValidationError> {
115 if !entry.ignore_duplicate_id {
116 if !self.instance_ids.insert(entry.instance_id.clone()) {
117 return Err(ValidationError::DuplicateId(entry.instance_id));
118 }
119 }
120 if !self.monikers.insert(entry.moniker.clone()) {
121 return Err(ValidationError::DuplicateMoniker(entry.moniker));
122 }
123 self.instances.push(entry);
124 Ok(())
125 }
126
127 pub fn id_for_moniker(&self, moniker: &Moniker) -> Option<&InstanceId> {
129 self.instances.iter().find(|e| &e.moniker == moniker).map(|e| &e.instance_id)
130 }
131
132 pub fn moniker_for_id(&self, id: &InstanceId) -> Option<&Moniker> {
135 self.instances.iter().find(|e| &e.instance_id == id).map(|e| &e.moniker)
136 }
137
138 pub fn contains_id(&self, id: &InstanceId) -> bool {
140 self.instance_ids.contains(id)
141 }
142
143 pub fn iter(&self) -> impl Iterator<Item = &IndexEntry> {
144 self.instances.iter()
145 }
146}
147
148impl TryFrom<PersistedIndex> for Index {
149 type Error = ValidationError;
150
151 fn try_from(value: PersistedIndex) -> Result<Self, Self::Error> {
152 let mut index = Index::default();
153 for entry in value.instances.into_iter() {
154 index.insert(entry)?;
155 }
156 Ok(index)
157 }
158}
159
160impl From<Index> for PersistedIndex {
161 fn from(mut value: Index) -> Self {
162 value.instances.sort_by(|a, b| a.moniker.cmp(&b.moniker));
163 Self { instances: value.instances }
164 }
165}
166
/// Uniqueness violations detected while building a single `Index`.
#[derive(Error, Debug, Clone, PartialEq)]
pub enum ValidationError {
    /// The same moniker was assigned to more than one entry.
    #[error("duplicate moniker: {}", .0)]
    DuplicateMoniker(Moniker),
    /// The same instance ID was assigned to more than one entry (and the
    /// entry did not opt out via `ignore_duplicate_id`).
    #[error("duplicate instance ID: {}", .0)]
    DuplicateId(InstanceId),
}
174
175#[derive(Error, Debug, Clone, PartialEq)]
176pub enum MergeError {
177 #[error("Moniker {}' must be unique but exists in following index files:\n {}\n {}", .moniker, .source1, .source2)]
178 DuplicateMoniker { moniker: Moniker, source1: Utf8PathBuf, source2: Utf8PathBuf },
179 #[error("Instance ID '{}' must be unique but exists in following index files:\n {}\n {}", .instance_id, .source1, .source2)]
180 DuplicateId { instance_id: InstanceId, source1: Utf8PathBuf, source2: Utf8PathBuf },
181}
182
/// Accumulates entries from multiple index files, remembering which file each
/// moniker and instance ID came from so a merge conflict can name both
/// contributing sources.
pub struct MergeContext {
    /// The merged index built up so far.
    output_index: Index,
    /// Maps each merged moniker to the index file that contributed it.
    moniker_to_source_path: HashMap<Moniker, Utf8PathBuf>,
    /// Maps each merged instance ID to the index file that contributed it.
    instance_id_to_source_path: HashMap<InstanceId, Utf8PathBuf>,
}
192
193impl MergeContext {
194 pub fn merge(&mut self, source_index_path: &Utf8Path, index: &Index) -> Result<(), MergeError> {
199 for instance in &index.instances {
200 self.output_index.insert(instance.clone()).map_err(|err| match err {
201 ValidationError::DuplicateMoniker(moniker) => {
202 let previous_source_path =
203 self.moniker_to_source_path.get(&moniker).cloned().unwrap_or_default();
204 MergeError::DuplicateMoniker {
205 moniker,
206 source1: previous_source_path,
207 source2: source_index_path.to_owned(),
208 }
209 }
210 ValidationError::DuplicateId(instance_id) => {
211 let previous_source_path = self
212 .instance_id_to_source_path
213 .get(&instance_id)
214 .cloned()
215 .unwrap_or_default();
216 MergeError::DuplicateId {
217 instance_id,
218 source1: previous_source_path,
219 source2: source_index_path.to_owned(),
220 }
221 }
222 })?;
223 self.instance_id_to_source_path
224 .insert(instance.instance_id.clone(), source_index_path.to_owned());
225 self.moniker_to_source_path
226 .insert(instance.moniker.clone(), source_index_path.to_owned());
227 }
228 Ok(())
229 }
230
231 pub fn output(self) -> Index {
233 self.output_index
234 }
235}
236
237impl Default for MergeContext {
238 fn default() -> Self {
239 MergeContext {
240 output_index: Index::default(),
241 instance_id_to_source_path: HashMap::new(),
242 moniker_to_source_path: HashMap::new(),
243 }
244 }
245}
246
#[cfg(test)]
mod tests {
    use super::*;
    use anyhow::Result;

    // Merging nothing yields the default (empty) index.
    #[test]
    fn merge_empty_index() {
        let ctx = MergeContext::default();
        assert_eq!(ctx.output(), Index::default());
    }

    // Merging a single index reproduces that index unchanged.
    #[test]
    fn merge_single_index() -> Result<()> {
        let mut ctx = MergeContext::default();

        let mut index = Index::default();
        let moniker = ["foo"].try_into().unwrap();
        let instance_id = InstanceId::new_random(&mut rand::rng());
        index.insert(IndexEntry { moniker, instance_id, ignore_duplicate_id: false }).unwrap();

        ctx.merge(Utf8Path::new("/random/file/path"), &index)?;
        assert_eq!(ctx.output(), index.clone());
        Ok(())
    }

    // Two files assigning the same instance ID (without the ignore flag) must
    // fail with a DuplicateId error that names both source files.
    #[test]
    fn merge_duplicate_id() -> Result<()> {
        let source1 = Utf8Path::new("/a/b/c");
        let source2 = Utf8Path::new("/d/e/f");

        let id = InstanceId::new_random(&mut rand::rng());
        let index1 = {
            let mut index = Index::default();
            let moniker = ["foo"].try_into().unwrap();
            index
                .insert(IndexEntry { moniker, instance_id: id.clone(), ignore_duplicate_id: false })
                .unwrap();
            index
        };
        let index2 = {
            let mut index = Index::default();
            let moniker = ["bar"].try_into().unwrap();
            index
                .insert(IndexEntry { moniker, instance_id: id.clone(), ignore_duplicate_id: false })
                .unwrap();
            index
        };

        let mut ctx = MergeContext::default();
        ctx.merge(source1, &index1)?;

        // The second merge hits the duplicate; source1 is the recorded origin.
        let err = ctx.merge(source2, &index2).unwrap_err();
        assert_eq!(
            err,
            MergeError::DuplicateId {
                instance_id: id,
                source1: source1.to_owned(),
                source2: source2.to_owned()
            }
        );

        Ok(())
    }

    // With ignore_duplicate_id set on the later entry, the duplicate ID is
    // tolerated and both merges succeed.
    #[test]
    fn merge_duplicate_id_with_ignore_flag() {
        let source1 = Utf8Path::new("/a/b/c");
        let source2 = Utf8Path::new("/d/e/f");

        let id = InstanceId::new_random(&mut rand::rng());
        let index1 = {
            let mut index = Index::default();
            let moniker = ["foo"].try_into().unwrap();
            index
                .insert(IndexEntry { moniker, instance_id: id.clone(), ignore_duplicate_id: false })
                .unwrap();
            index
        };
        let index2 = {
            let mut index = Index::default();
            let moniker = ["bar"].try_into().unwrap();
            index
                .insert(IndexEntry { moniker, instance_id: id.clone(), ignore_duplicate_id: true })
                .unwrap();
            index
        };

        let mut ctx = MergeContext::default();
        ctx.merge(source1, &index1).expect("unexpected merge error");
        ctx.merge(source2, &index2).expect("unexpected merge error");
    }

    // Two files assigning different IDs to the same moniker must fail with a
    // DuplicateMoniker error that names both source files.
    #[test]
    fn merge_duplicate_moniker() -> Result<()> {
        let source1 = Utf8Path::new("/a/b/c");
        let source2 = Utf8Path::new("/d/e/f");

        let moniker: Moniker = ["foo"].try_into().unwrap();
        let index1 = {
            let mut index = Index::default();
            let instance_id = InstanceId::new_random(&mut rand::rng());
            index
                .insert(IndexEntry {
                    moniker: moniker.clone(),
                    instance_id,
                    ignore_duplicate_id: false,
                })
                .unwrap();
            index
        };
        let index2 = {
            let mut index = Index::default();
            let instance_id = InstanceId::new_random(&mut rand::rng());
            index
                .insert(IndexEntry {
                    moniker: moniker.clone(),
                    instance_id,
                    ignore_duplicate_id: false,
                })
                .unwrap();
            index
        };

        let mut ctx = MergeContext::default();
        ctx.merge(source1, &index1)?;

        let err = ctx.merge(source2, &index2).unwrap_err();
        assert_eq!(
            err,
            MergeError::DuplicateMoniker {
                moniker,
                source1: source1.to_owned(),
                source2: source2.to_owned()
            }
        );

        Ok(())
    }

    // End-to-end: two JSON5 files (with comments, per JSON5) parse and merge
    // into the expected combined index.
    #[cfg(feature = "serde")]
    #[test]
    fn merged_from_json5_files() {
        use std::io::Write;

        let mut index_file_1 = tempfile::NamedTempFile::new().unwrap();
        index_file_1
            .write_all(
                r#"{
            // Here is a comment.
            instances: [
                {
                    instance_id: "fb94044d62278b37c221c7fdeebdcf1304262f3e11416f68befa5ef88b7a2163",
                    moniker: "/a/b"
                }
            ]
        }"#
                .as_bytes(),
            )
            .unwrap();

        let mut index_file_2 = tempfile::NamedTempFile::new().unwrap();
        index_file_2
            .write_all(
                r#"{
            // Here is a comment.
            instances: [
                {
                    instance_id: "4f915af6c4b682867ab7ad2dc9cbca18342ddd9eec61724f19d231cf6d07f122",
                    moniker: "/c/d"
                }
            ]
        }"#
                .as_bytes(),
            )
            .unwrap();

        let expected_index = {
            let mut index = Index::default();
            index
                .insert(IndexEntry {
                    moniker: "/a/b".parse::<Moniker>().unwrap(),
                    instance_id: "fb94044d62278b37c221c7fdeebdcf1304262f3e11416f68befa5ef88b7a2163"
                        .parse::<InstanceId>()
                        .unwrap(),
                    ignore_duplicate_id: false,
                })
                .unwrap();
            index
                .insert(IndexEntry {
                    moniker: "/c/d".parse::<Moniker>().unwrap(),
                    instance_id: "4f915af6c4b682867ab7ad2dc9cbca18342ddd9eec61724f19d231cf6d07f122"
                        .parse::<InstanceId>()
                        .unwrap(),
                    ignore_duplicate_id: false,
                })
                .unwrap();
            index
        };

        let files = [
            Utf8PathBuf::from_path_buf(index_file_1.path().to_path_buf()).unwrap(),
            Utf8PathBuf::from_path_buf(index_file_2.path().to_path_buf()).unwrap(),
        ];
        assert_eq!(expected_index, Index::merged_from_json5_files(&files).unwrap());
    }

    // Round-trip: serializing then deserializing an index (through the
    // PersistedIndex conversion) reproduces the original.
    #[cfg(feature = "serde")]
    #[test]
    fn serialize_deserialize() -> Result<()> {
        let expected_index = {
            let mut index = Index::default();
            for i in 0..5 {
                let moniker: Moniker = [i.to_string().as_str()].try_into().unwrap();
                let instance_id = InstanceId::new_random(&mut rand::rng());
                index
                    .insert(IndexEntry { moniker, instance_id, ignore_duplicate_id: false })
                    .unwrap();
            }
            index
        };

        let json_index = serde_json5::to_string(&expected_index)?;
        let actual_index = serde_json5::from_str(&json_index)?;
        assert_eq!(expected_index, actual_index);

        Ok(())
    }
}