Based on much-appreciated feedback on my earlier posts, here is my latest version. This one is more compact and expressive, and since Duplicates makes a single pass using HashSet<T>.Add (amortized O(1)), it still scales roughly linearly, so I don't think any performance has been sacrificed for the gain in expressive power.

[ValidationState(ValidationState.Enabled)]
public partial class ExampleElement
{
  [ValidationMethod(ValidationCategories.Menu | ValidationCategories.Save)]
  private void TestExampleElement(ValidationContext context)
  {
    var nonUniquePropNames = this.Properties.Select(p => p.Name).Duplicates();
    nonUniquePropNames.IfAny(pns =>
      context.LogError(String.Format("Non-unique property names: {0}", pns.CommaSeparatedList()), "Error 1", this));

    var nonUniqueSubPropNames = this.Properties.SelectMany(p => p.SubProperties).Select(p => p.Name).Duplicates();
    nonUniqueSubPropNames.IfAny(spns =>
      context.LogError(String.Format("Non-unique sub property names: {0}", spns.CommaSeparatedList()), "Error 2", this));
  }
}

using System;
using System.Collections.Generic;
using System.Linq;

public static class C
{
  // Returns the set of elements that occur more than once in source.
  // Each HashSet<T>.Add is amortized O(1), so a single pass over source scales roughly linearly.
  public static HashSet<T> Duplicates<T>(this IEnumerable<T> source)
  {
    HashSet<T> items = new HashSet<T>();
    HashSet<T> duplicates = new HashSet<T>();
    foreach (T item in source)
    {
      if (!items.Add(item))
        duplicates.Add(item);
    }
    return duplicates;
  }

  // Precondition: source is not empty (Aggregate throws on an empty sequence).
  public static string CommaSeparatedList(this IEnumerable<string> source)
  {
    return source.Aggregate((agg, s) => agg + ", " + s);
  }

  // Invokes act only when source contains at least one element.
  public static void IfAny<T>(this IEnumerable<T> source, Action<IEnumerable<T>> act)
  {
      if (source.Any()) act(source);
  }
}
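
If you want to try the helpers outside of a DSL validation method, here is a minimal console sketch. It assumes the static class C above is compiled alongside it; the sample data (demoNames) is purely made up for illustration.

using System;
using System.Collections.Generic;
using System.Linq;

class Demo
{
  static void Main()
  {
    // Hypothetical sample data, just to exercise the helpers.
    var demoNames = new[] { "Id", "Name", "Id", "Created", "Name" };

    // Duplicates() yields each repeated value once: { "Id", "Name" }.
    var repeats = demoNames.Duplicates();

    // IfAny() only runs the action when there is something to report,
    // which also keeps CommaSeparatedList() away from empty sequences.
    repeats.IfAny(r =>
      Console.WriteLine("Non-unique names: " + r.CommaSeparatedList()));
  }
}

The same IfAny/CommaSeparatedList pairing is what keeps the validation method above free of explicit emptiness checks.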