Commit 35adfb8e by Venkatesh Seetharam

Add interface classification

parent 8900b0ae
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.metadata.classification;

import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

/**
 * Annotations to classify the intended audience of a class or method.
 */
@InterfaceAudience.Public
public class InterfaceAudience {

    private InterfaceAudience() {
    }

    /** Intended for use only within this project. */
    @Documented
    @Retention(RetentionPolicy.RUNTIME)
    public @interface Private {
    }

    /** Intended only for the project(s) named in the annotation value. */
    @Documented
    @Retention(RetentionPolicy.RUNTIME)
    public @interface LimitedPrivate {
        String[] value();
    }

    /** Intended for use by any project or application. */
    @Documented
    @Retention(RetentionPolicy.RUNTIME)
    public @interface Public {
    }
}
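For context, a brief usage sketch (not part of this commit): the three nested annotations are meant to be applied to other classes and methods, as the hunks below do with @InterfaceAudience.Private. The ExampleService class and the "TypeSystem" audience value are illustrative only.

import org.apache.hadoop.metadata.classification.InterfaceAudience;

// Hypothetical client class, shown only to illustrate how the annotations are applied.
@InterfaceAudience.Public
public class ExampleService {

    // Exposed only to the cooperating component(s) named in the annotation value.
    @InterfaceAudience.LimitedPrivate({"TypeSystem"})
    public void internalHook() {
    }

    // Implementation detail; callers outside the project should not rely on it.
    @InterfaceAudience.Private
    void resetForTests() {
    }
}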
@@ -20,6 +20,7 @@ package org.apache.hadoop.metadata.typesystem;

 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
+import org.apache.hadoop.metadata.classification.InterfaceAudience;
 import org.apache.hadoop.metadata.typesystem.persistence.Id;

 import java.util.List;
@@ -60,11 +61,12 @@ public class Referenceable extends Struct implements IReferenceableInstance {
     }

     /**
-     * @nopublic only use during deserialization
+     * Not public - only use during deserialization
      * @param guid
      * @param typeName
      * @param values
      */
+    @InterfaceAudience.Private
     public Referenceable(String guid, String typeName, Map<String, Object> values,
                          List<String> _traitNames,
                          Map<String, IStruct> _traits) {
...
@@ -18,6 +18,8 @@
 package org.apache.hadoop.metadata.typesystem;

+import org.apache.hadoop.metadata.classification.InterfaceAudience;
+
 import java.util.HashMap;
 import java.util.Map;
@@ -28,12 +30,10 @@ public class Struct implements IStruct {
     public Struct(String typeName) {
         this.typeName = typeName;
-        values = new HashMap<String, Object>();
+        values = new HashMap<>();
     }

-    /**
-     @nopublic
-     */
+    @InterfaceAudience.Private
     public Struct(String typeName, Map<String, Object> values) {
         this(typeName);
         this.values.putAll(values);
@@ -54,10 +54,7 @@ public class Struct implements IStruct {
         values.put(attrName, value);
     }

-    /**
-     * @nopublic
-     * @return
-     */
+    @InterfaceAudience.Private
     public Map<String, Object> getValuesMap() {
         return values;
     }
...
@@ -18,6 +18,8 @@
 package org.apache.hadoop.metadata.typesystem.types;

+import com.google.common.base.Preconditions;
+
 public final class AttributeDefinition {

     public final String name;
@@ -42,6 +44,9 @@ public final class AttributeDefinition {
     public AttributeDefinition(String name, String dataTypeName,
                                Multiplicity multiplicity, boolean isComposite, boolean isUnique,
                                boolean isIndexable, String reverseAttributeName) {
+        Preconditions.checkNotNull(name);
+        Preconditions.checkNotNull(dataTypeName);
+
         this.name = name;
         this.dataTypeName = dataTypeName;
         this.multiplicity = multiplicity;
@@ -64,9 +69,9 @@ public final class AttributeDefinition {
         if (!dataTypeName.equals(that.dataTypeName)) return false;
         if (!multiplicity.equals(that.multiplicity)) return false;
         if (!name.equals(that.name)) return false;
-        if (reverseAttributeName != null ? !reverseAttributeName.equals(that.reverseAttributeName)
-                : that
-                .reverseAttributeName != null)
+        if (reverseAttributeName != null
+                ? !reverseAttributeName.equals(that.reverseAttributeName)
+                : that.reverseAttributeName != null)
             return false;
         return true;
...
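A small sketch of what the new Preconditions checks buy (illustrative, not from the commit): a null name or dataTypeName now fails fast at construction time, because Guava's Preconditions.checkNotNull throws NullPointerException on a null argument. The "string" type name is made up, and Multiplicity is assumed to live in the same org.apache.hadoop.metadata.typesystem.types package.

import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.Multiplicity; // assumed package

public class AttributeDefinitionCheckDemo {
    public static void main(String[] args) {
        try {
            // name is null -> NullPointerException raised by Preconditions.checkNotNull
            new AttributeDefinition(null, "string", Multiplicity.REQUIRED,
                    false, false, false, null);
        } catch (NullPointerException expected) {
            System.out.println("Null attribute name rejected at construction time");
        }
    }
}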
@@ -19,6 +19,7 @@
 package org.apache.hadoop.metadata.typesystem.types;

 import com.google.common.collect.ImmutableList;
+import org.apache.hadoop.metadata.classification.InterfaceAudience;

 public class HierarchicalTypeDefinition<T extends HierarchicalType> extends StructTypeDefinition {
@@ -26,14 +27,15 @@ public class HierarchicalTypeDefinition<T extends HierarchicalType> extends StructTypeDefinition {
     public final String hierarchicalMetaTypeName;

     /**
-     * Used for json deserialization only
-     * @nopublic
+     * Used for json deserialization only.
+     * not intended public consumption
      * @param hierarchicalMetaTypeName
      * @param typeName
      * @param superTypes
      * @param attributeDefinitions
      * @throws ClassNotFoundException
      */
+    @InterfaceAudience.Private
     public HierarchicalTypeDefinition(String hierarchicalMetaTypeName,
                                       String typeName, String[] superTypes,
                                       AttributeDefinition[] attributeDefinitions)
...
@@ -20,6 +20,7 @@ package org.apache.hadoop.metadata.typesystem.types;

 import com.google.common.collect.ImmutableList;
 import org.apache.hadoop.metadata.MetadataException;
+import org.apache.hadoop.metadata.classification.InterfaceAudience;
 import org.apache.hadoop.metadata.typesystem.TypesDef;

 import javax.inject.Singleton;
@@ -37,6 +38,7 @@ import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;

 @Singleton
+@InterfaceAudience.Private
 public class TypeSystem {
     private static final TypeSystem INSTANCE = new TypeSystem();
     public static SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
@@ -59,9 +61,9 @@ public class TypeSystem {
     }

     /**
-     * This is only used for testing prurposes.
-     * @nonpublic
+     * This is only used for testing purposes. Not intended for public use.
      */
+    @InterfaceAudience.Private
     public void reset() {
         initialize();
     }
...
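Because the annotations are declared with RetentionPolicy.RUNTIME, the classification added above is visible to tooling at run time. A minimal, hypothetical check (the demo class itself is not part of the commit):

import org.apache.hadoop.metadata.classification.InterfaceAudience;
import org.apache.hadoop.metadata.typesystem.types.TypeSystem;

public class AudienceCheckDemo {
    public static void main(String[] args) {
        // True after this commit: TypeSystem carries @InterfaceAudience.Private.
        boolean markedPrivate =
                TypeSystem.class.isAnnotationPresent(InterfaceAudience.Private.class);
        System.out.println("TypeSystem is audience-private: " + markedPrivate);
    }
}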
@@ -47,6 +47,11 @@ public class TypesUtil {
         return new AttributeDefinition(name, dataType, Multiplicity.OPTIONAL, false, null);
     }

+    public static AttributeDefinition createRequiredAttrDef(String name,
+                                                            String dataType) {
+        return new AttributeDefinition(name, dataType, Multiplicity.REQUIRED, false, null);
+    }
+
     public static AttributeDefinition createUniqueRequiredAttrDef(String name,
                                                                   IDataType dataType) {
         return new AttributeDefinition(name, dataType.getName(),
...
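Finally, a sketch of the new createRequiredAttrDef helper in use. TypesUtil's package is not shown in this hunk, so the import below is an assumption, and the attribute and type names are illustrative.

import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
import org.apache.hadoop.metadata.typesystem.types.TypesUtil; // package assumed, not shown in the hunk

public class RequiredAttrDemo {
    public static void main(String[] args) {
        // A required (Multiplicity.REQUIRED) attribute, unlike the optional variant above it.
        AttributeDefinition nameAttr = TypesUtil.createRequiredAttrDef("name", "string");
        System.out.println("Defined required attribute: " + nameAttr.name);
    }
}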