Source code for snapchat.research.gbml.gbml_config_pb2

"""
@generated by mypy-protobuf.  Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import snapchat.research.gbml.dataset_metadata_pb2
import snapchat.research.gbml.flattened_graph_metadata_pb2
import snapchat.research.gbml.graph_schema_pb2
import snapchat.research.gbml.inference_metadata_pb2
import snapchat.research.gbml.postprocessed_metadata_pb2
import snapchat.research.gbml.subgraph_sampling_strategy_pb2
import snapchat.research.gbml.trained_model_metadata_pb2
import sys

if sys.version_info >= (3, 8):
    import typing as typing_extensions
else:
    import typing_extensions

DESCRIPTOR: google.protobuf.descriptor.FileDescriptor

class GbmlConfig(google.protobuf.message.Message):
    """TODO: document all protos with comments."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    class TaskMetadata(google.protobuf.message.Message):
        """Indicates the training task specification and metadata for the config."""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        class NodeBasedTaskMetadata(google.protobuf.message.Message):
            DESCRIPTOR: google.protobuf.descriptor.Descriptor

            SUPERVISION_NODE_TYPES_FIELD_NUMBER: builtins.int
            @property
            def supervision_node_types(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ...
            def __init__(self, *, supervision_node_types: collections.abc.Iterable[builtins.str] | None = ...) -> None: ...
            def ClearField(self, field_name: typing_extensions.Literal["supervision_node_types", b"supervision_node_types"]) -> None: ...

        class NodeAnchorBasedLinkPredictionTaskMetadata(google.protobuf.message.Message):
            DESCRIPTOR: google.protobuf.descriptor.Descriptor

            SUPERVISION_EDGE_TYPES_FIELD_NUMBER: builtins.int
            @property
            def supervision_edge_types(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[snapchat.research.gbml.graph_schema_pb2.EdgeType]: ...
            def __init__(self, *, supervision_edge_types: collections.abc.Iterable[snapchat.research.gbml.graph_schema_pb2.EdgeType] | None = ...) -> None: ...
            def ClearField(self, field_name: typing_extensions.Literal["supervision_edge_types", b"supervision_edge_types"]) -> None: ...

        class LinkBasedTaskMetadata(google.protobuf.message.Message):
            DESCRIPTOR: google.protobuf.descriptor.Descriptor

            SUPERVISION_EDGE_TYPES_FIELD_NUMBER: builtins.int
            @property
            def supervision_edge_types(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[snapchat.research.gbml.graph_schema_pb2.EdgeType]: ...
            def __init__(self, *, supervision_edge_types: collections.abc.Iterable[snapchat.research.gbml.graph_schema_pb2.EdgeType] | None = ...) -> None: ...
            def ClearField(self, field_name: typing_extensions.Literal["supervision_edge_types", b"supervision_edge_types"]) -> None: ...

        NODE_BASED_TASK_METADATA_FIELD_NUMBER: builtins.int
        NODE_ANCHOR_BASED_LINK_PREDICTION_TASK_METADATA_FIELD_NUMBER: builtins.int
        LINK_BASED_TASK_METADATA_FIELD_NUMBER: builtins.int
        @property
        def node_based_task_metadata(self) -> global___GbmlConfig.TaskMetadata.NodeBasedTaskMetadata: ...
        @property
        def node_anchor_based_link_prediction_task_metadata(self) -> global___GbmlConfig.TaskMetadata.NodeAnchorBasedLinkPredictionTaskMetadata: ...
        @property
        def link_based_task_metadata(self) -> global___GbmlConfig.TaskMetadata.LinkBasedTaskMetadata: ...
        def __init__(self, *, node_based_task_metadata: global___GbmlConfig.TaskMetadata.NodeBasedTaskMetadata | None = ..., node_anchor_based_link_prediction_task_metadata: global___GbmlConfig.TaskMetadata.NodeAnchorBasedLinkPredictionTaskMetadata | None = ..., link_based_task_metadata: global___GbmlConfig.TaskMetadata.LinkBasedTaskMetadata | None = ...) -> None: ...
        def HasField(self, field_name: typing_extensions.Literal["link_based_task_metadata", b"link_based_task_metadata", "node_anchor_based_link_prediction_task_metadata", b"node_anchor_based_link_prediction_task_metadata", "node_based_task_metadata", b"node_based_task_metadata", "task_metadata", b"task_metadata"]) -> builtins.bool: ...
        def ClearField(self, field_name: typing_extensions.Literal["link_based_task_metadata", b"link_based_task_metadata", "node_anchor_based_link_prediction_task_metadata", b"node_anchor_based_link_prediction_task_metadata", "node_based_task_metadata", b"node_based_task_metadata", "task_metadata", b"task_metadata"]) -> None: ...
        def WhichOneof(self, oneof_group: typing_extensions.Literal["task_metadata", b"task_metadata"]) -> typing_extensions.Literal["node_based_task_metadata", "node_anchor_based_link_prediction_task_metadata", "link_based_task_metadata"] | None: ...
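
    # Illustrative usage sketch (not part of the generated stub): constructing the
    # task_metadata oneof. The EdgeType field names (src_node_type / relation /
    # dst_node_type) are assumptions; verify them against graph_schema.proto.
    #
    #   from snapchat.research.gbml import gbml_config_pb2, graph_schema_pb2
    #
    #   task = gbml_config_pb2.GbmlConfig.TaskMetadata(
    #       node_anchor_based_link_prediction_task_metadata=(
    #           gbml_config_pb2.GbmlConfig.TaskMetadata.NodeAnchorBasedLinkPredictionTaskMetadata(
    #               supervision_edge_types=[
    #                   graph_schema_pb2.EdgeType(
    #                       src_node_type="user", relation="follows", dst_node_type="user"
    #                   )
    #               ]
    #           )
    #       )
    #   )
    #   # Only one member of the oneof can be set at a time.
    #   assert task.WhichOneof("task_metadata") == "node_anchor_based_link_prediction_task_metadata"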

    class SharedConfig(google.protobuf.message.Message):
        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        class SharedArgsEntry(google.protobuf.message.Message):
            DESCRIPTOR: google.protobuf.descriptor.Descriptor

            KEY_FIELD_NUMBER: builtins.int
            VALUE_FIELD_NUMBER: builtins.int
            key: builtins.str
            value: builtins.str
            def __init__(self, *, key: builtins.str = ..., value: builtins.str = ...) -> None: ...
            def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...

        PREPROCESSED_METADATA_URI_FIELD_NUMBER: builtins.int
        FLATTENED_GRAPH_METADATA_FIELD_NUMBER: builtins.int
        DATASET_METADATA_FIELD_NUMBER: builtins.int
        TRAINED_MODEL_METADATA_FIELD_NUMBER: builtins.int
        INFERENCE_METADATA_FIELD_NUMBER: builtins.int
        POSTPROCESSED_METADATA_FIELD_NUMBER: builtins.int
        SHARED_ARGS_FIELD_NUMBER: builtins.int
        IS_GRAPH_DIRECTED_FIELD_NUMBER: builtins.int
        SHOULD_SKIP_TRAINING_FIELD_NUMBER: builtins.int
        SHOULD_SKIP_AUTOMATIC_TEMP_ASSET_CLEANUP_FIELD_NUMBER: builtins.int
        SHOULD_SKIP_INFERENCE_FIELD_NUMBER: builtins.int
        SHOULD_SKIP_MODEL_EVALUATION_FIELD_NUMBER: builtins.int
        SHOULD_INCLUDE_ISOLATED_NODES_IN_TRAINING_FIELD_NUMBER: builtins.int
        preprocessed_metadata_uri: builtins.str
        """Uri where DataPreprocessor generates the PreprocessedMetadata proto."""
        @property
        def flattened_graph_metadata(self) -> snapchat.research.gbml.flattened_graph_metadata_pb2.FlattenedGraphMetadata:
            """FlattenedGraphMetadata message, which designates locations of GraphFlat outputs."""
        @property
        def dataset_metadata(self) -> snapchat.research.gbml.dataset_metadata_pb2.DatasetMetadata:
            """DatasetMetadata message, which designates location of SplitGenerator outputs."""
        @property
        def trained_model_metadata(self) -> snapchat.research.gbml.trained_model_metadata_pb2.TrainedModelMetadata:
            """TrainedModelMetadata message, which designates location of Trainer outputs."""
        @property
        def inference_metadata(self) -> snapchat.research.gbml.inference_metadata_pb2.InferenceMetadata:
            """InferenceMetadata message, which designates location of Inferencer outputs."""
        @property
        def postprocessed_metadata(self) -> snapchat.research.gbml.postprocessed_metadata_pb2.PostProcessedMetadata:
            """PostProcessedMetadata message, which designates location of PostProcessor outputs."""
        @property
        def shared_args(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ...
        is_graph_directed: builtins.bool
        """Whether the graph is directed or undirected (bidirectional)."""
        should_skip_training: builtins.bool
        """Whether to skip training (inference-only jobs)."""
        should_skip_automatic_temp_asset_cleanup: builtins.bool
        """If set to true, skips automatic cleanup of temp assets. Useful if you are running
        hyperparameter tuning jobs and don't want to continuously run the whole pipeline.
        """
        should_skip_inference: builtins.bool
        """Whether to skip inference (training-only jobs)."""
        should_skip_model_evaluation: builtins.bool
        """If set, model metrics like MRR, etc. will not be computed or exported.
        If should_skip_training is also set, this has the side effect of not generating
        training samples, producing only the RNNs needed for inference.
        """
        should_include_isolated_nodes_in_training: builtins.bool
        """If set to true, isolated nodes are included in the training data. As isolated
        nodes do not have positive neighbors, a self loop is added: SGS outputs training
        samples including isolated nodes, and the trainer adds self loops in training subgraphs.
        """
        def __init__(self, *, preprocessed_metadata_uri: builtins.str = ..., flattened_graph_metadata: snapchat.research.gbml.flattened_graph_metadata_pb2.FlattenedGraphMetadata | None = ..., dataset_metadata: snapchat.research.gbml.dataset_metadata_pb2.DatasetMetadata | None = ..., trained_model_metadata: snapchat.research.gbml.trained_model_metadata_pb2.TrainedModelMetadata | None = ..., inference_metadata: snapchat.research.gbml.inference_metadata_pb2.InferenceMetadata | None = ..., postprocessed_metadata: snapchat.research.gbml.postprocessed_metadata_pb2.PostProcessedMetadata | None = ..., shared_args: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., is_graph_directed: builtins.bool = ..., should_skip_training: builtins.bool = ..., should_skip_automatic_temp_asset_cleanup: builtins.bool = ..., should_skip_inference: builtins.bool = ..., should_skip_model_evaluation: builtins.bool = ..., should_include_isolated_nodes_in_training: builtins.bool = ...) -> None: ...
        def HasField(self, field_name: typing_extensions.Literal["dataset_metadata", b"dataset_metadata", "flattened_graph_metadata", b"flattened_graph_metadata", "inference_metadata", b"inference_metadata", "postprocessed_metadata", b"postprocessed_metadata", "trained_model_metadata", b"trained_model_metadata"]) -> builtins.bool: ...
        def ClearField(self, field_name: typing_extensions.Literal["dataset_metadata", b"dataset_metadata", "flattened_graph_metadata", b"flattened_graph_metadata", "inference_metadata", b"inference_metadata", "is_graph_directed", b"is_graph_directed", "postprocessed_metadata", b"postprocessed_metadata", "preprocessed_metadata_uri", b"preprocessed_metadata_uri", "shared_args", b"shared_args", "should_include_isolated_nodes_in_training", b"should_include_isolated_nodes_in_training", "should_skip_automatic_temp_asset_cleanup", b"should_skip_automatic_temp_asset_cleanup", "should_skip_inference", b"should_skip_inference", "should_skip_model_evaluation", b"should_skip_model_evaluation", "should_skip_training", b"should_skip_training", "trained_model_metadata", b"trained_model_metadata"]) -> None: ...
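
    # Illustrative usage sketch (not part of the generated stub): populating SharedConfig.
    # The URI and map values below are hypothetical placeholders.
    #
    #   from snapchat.research.gbml import gbml_config_pb2
    #
    #   shared = gbml_config_pb2.GbmlConfig.SharedConfig(
    #       preprocessed_metadata_uri="gs://my-bucket/preprocessed_metadata.yaml",
    #       is_graph_directed=True,
    #       should_skip_model_evaluation=True,
    #       shared_args={"some_key": "some_value"},
    #   )
    #   # Submessage fields support HasField; scalar and map fields do not.
    #   assert not shared.HasField("flattened_graph_metadata")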

    class DatasetConfig(google.protobuf.message.Message):
        """Contains config related to generating training data for a GML task."""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        class DataPreprocessorConfig(google.protobuf.message.Message):
            DESCRIPTOR: google.protobuf.descriptor.Descriptor

            class DataPreprocessorArgsEntry(google.protobuf.message.Message):
                DESCRIPTOR: google.protobuf.descriptor.Descriptor

                KEY_FIELD_NUMBER: builtins.int
                VALUE_FIELD_NUMBER: builtins.int
                key: builtins.str
                value: builtins.str
                def __init__(self, *, key: builtins.str = ..., value: builtins.str = ...) -> None: ...
                def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...

            DATA_PREPROCESSOR_CONFIG_CLS_PATH_FIELD_NUMBER: builtins.int
            DATA_PREPROCESSOR_ARGS_FIELD_NUMBER: builtins.int
            data_preprocessor_config_cls_path: builtins.str
            """Uri pointing to user-written DataPreprocessorConfig class definition."""
            @property
            def data_preprocessor_args(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
                """Arguments to instantiate concrete DataPreprocessorConfig instance with."""
            def __init__(self, *, data_preprocessor_config_cls_path: builtins.str = ..., data_preprocessor_args: collections.abc.Mapping[builtins.str, builtins.str] | None = ...) -> None: ...
            def ClearField(self, field_name: typing_extensions.Literal["data_preprocessor_args", b"data_preprocessor_args", "data_preprocessor_config_cls_path", b"data_preprocessor_config_cls_path"]) -> None: ...
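
        # Illustrative usage sketch (not part of the generated stub): pointing at a
        # user-defined preprocessor class. The class path and argument keys are hypothetical.
        #
        #   from snapchat.research.gbml import gbml_config_pb2
        #
        #   preprocessor = gbml_config_pb2.GbmlConfig.DatasetConfig.DataPreprocessorConfig(
        #       data_preprocessor_config_cls_path="my_project.preprocessing.MyDataPreprocessorConfig",
        #       data_preprocessor_args={"bq_table": "my_project.my_dataset.my_table"},
        #   )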

        class SubgraphSamplerConfig(google.protobuf.message.Message):
            DESCRIPTOR: google.protobuf.descriptor.Descriptor

            class ExperimentalFlagsEntry(google.protobuf.message.Message):
                DESCRIPTOR: google.protobuf.descriptor.Descriptor

                KEY_FIELD_NUMBER: builtins.int
                VALUE_FIELD_NUMBER: builtins.int
                key: builtins.str
                value: builtins.str
                def __init__(self, *, key: builtins.str = ..., value: builtins.str = ...) -> None: ...
                def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...

            NUM_HOPS_FIELD_NUMBER: builtins.int
            NUM_NEIGHBORS_TO_SAMPLE_FIELD_NUMBER: builtins.int
            SUBGRAPH_SAMPLING_STRATEGY_FIELD_NUMBER: builtins.int
            NUM_POSITIVE_SAMPLES_FIELD_NUMBER: builtins.int
            EXPERIMENTAL_FLAGS_FIELD_NUMBER: builtins.int
            NUM_MAX_TRAINING_SAMPLES_TO_OUTPUT_FIELD_NUMBER: builtins.int
            NUM_USER_DEFINED_POSITIVE_SAMPLES_FIELD_NUMBER: builtins.int
            NUM_USER_DEFINED_NEGATIVE_SAMPLES_FIELD_NUMBER: builtins.int
            GRAPH_DB_CONFIG_FIELD_NUMBER: builtins.int
            num_hops: builtins.int
            """Number of hops for the subgraph sampler to include."""
            num_neighbors_to_sample: builtins.int
            """Indicates the max number of neighbors to sample for each hop.
            Can be set to -1 to indicate no sampling (include all neighbors).
            """
            @property
            def subgraph_sampling_strategy(self) -> snapchat.research.gbml.subgraph_sampling_strategy_pb2.SubgraphSamplingStrategy:
                """num_hops and num_neighbors_to_sample are deprecated in favor of this field.
                Used to specify how the graphs used for message passing are constructed.
                """
            num_positive_samples: builtins.int
            """Number of positive samples (1-hop) used in NodeAnchorBasedLinkPredictionTask as
            part of loss computation. It cannot be 0, and it is recommended to be larger than 1:
            due to the split-filtering logic in the split generator, most samples need at least
            one positive so they are not excluded from training.
            """
            @property
            def experimental_flags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
                """Arguments for experimental flags, e.g. permutation_strategy: 'deterministic'
                or 'non-deterministic'.

                (deprecated) uint32 num_hard_negative_samples = 4;
                Number of hard negative samples (3/4 hops) used in NodeAnchorBasedLinkPredictionTask,
                also used in loss computation. Random negatives will always be used, even when
                there are no hard negatives.
                """
            num_max_training_samples_to_output: builtins.int
            """Max number of training samples (i.e. nodes to store as main samples for training).
            If this is not provided or is set to 0, all nodes will be included for training.
            """
            num_user_defined_positive_samples: builtins.int
            """Number of user-defined positive samples. Used in NodeAnchorBasedLinkPredictionTask
            as part of loss computation. If `num_user_defined_positive_samples` is specified,
            `num_positive_samples` will be ignored, as positive samples will only be drawn from
            user-defined positive samples.
            """
            num_user_defined_negative_samples: builtins.int
            """Number of user-defined negative samples, treated as hard negative samples. Used in
            NodeAnchorBasedLinkPredictionTask and in loss computation. Random negatives will
            always be used, even when there are no user-defined hard negatives.
            """
            @property
            def graph_db_config(self) -> global___GbmlConfig.GraphDBConfig:
                """If specified, the intention is to run ingestion into graphDB for subgraph sampler."""
            def __init__(self, *, num_hops: builtins.int = ..., num_neighbors_to_sample: builtins.int = ..., subgraph_sampling_strategy: snapchat.research.gbml.subgraph_sampling_strategy_pb2.SubgraphSamplingStrategy | None = ..., num_positive_samples: builtins.int = ..., experimental_flags: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., num_max_training_samples_to_output: builtins.int = ..., num_user_defined_positive_samples: builtins.int = ..., num_user_defined_negative_samples: builtins.int = ..., graph_db_config: global___GbmlConfig.GraphDBConfig | None = ...) -> None: ...
            def HasField(self, field_name: typing_extensions.Literal["graph_db_config", b"graph_db_config", "subgraph_sampling_strategy", b"subgraph_sampling_strategy"]) -> builtins.bool: ...
            def ClearField(self, field_name: typing_extensions.Literal["experimental_flags", b"experimental_flags", "graph_db_config", b"graph_db_config", "num_hops", b"num_hops", "num_max_training_samples_to_output", b"num_max_training_samples_to_output", "num_neighbors_to_sample", b"num_neighbors_to_sample", "num_positive_samples", b"num_positive_samples", "num_user_defined_negative_samples", b"num_user_defined_negative_samples", "num_user_defined_positive_samples", b"num_user_defined_positive_samples", "subgraph_sampling_strategy", b"subgraph_sampling_strategy"]) -> None: ...
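
        # Illustrative usage sketch (not part of the generated stub): a sampler config that
        # takes 2 hops of up to 10 neighbors each; -1 would mean "include all neighbors".
        # The experimental flag key/value comes from the docstring above.
        #
        #   from snapchat.research.gbml import gbml_config_pb2
        #
        #   sampler = gbml_config_pb2.GbmlConfig.DatasetConfig.SubgraphSamplerConfig(
        #       num_hops=2,
        #       num_neighbors_to_sample=10,
        #       num_positive_samples=3,
        #       experimental_flags={"permutation_strategy": "deterministic"},
        #   )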

        class SplitGeneratorConfig(google.protobuf.message.Message):
            DESCRIPTOR: google.protobuf.descriptor.Descriptor

            class SplitStrategyArgsEntry(google.protobuf.message.Message):
                DESCRIPTOR: google.protobuf.descriptor.Descriptor

                KEY_FIELD_NUMBER: builtins.int
                VALUE_FIELD_NUMBER: builtins.int
                key: builtins.str
                value: builtins.str
                def __init__(self, *, key: builtins.str = ..., value: builtins.str = ...) -> None: ...
                def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...

            class AssignerArgsEntry(google.protobuf.message.Message):
                DESCRIPTOR: google.protobuf.descriptor.Descriptor

                KEY_FIELD_NUMBER: builtins.int
                VALUE_FIELD_NUMBER: builtins.int
                key: builtins.str
                value: builtins.str
                def __init__(self, *, key: builtins.str = ..., value: builtins.str = ...) -> None: ...
                def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...

            SPLIT_STRATEGY_CLS_PATH_FIELD_NUMBER: builtins.int
            SPLIT_STRATEGY_ARGS_FIELD_NUMBER: builtins.int
            ASSIGNER_CLS_PATH_FIELD_NUMBER: builtins.int
            ASSIGNER_ARGS_FIELD_NUMBER: builtins.int
            split_strategy_cls_path: builtins.str
            """Module path to concrete SplitStrategy instance."""
            @property
            def split_strategy_args(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
                """Arguments to instantiate concrete SplitStrategy instance with."""
            assigner_cls_path: builtins.str
            """Module path to concrete Assigner instance."""
            @property
            def assigner_args(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
                """Arguments to instantiate concrete Assigner instance with."""
            def __init__(self, *, split_strategy_cls_path: builtins.str = ..., split_strategy_args: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., assigner_cls_path: builtins.str = ..., assigner_args: collections.abc.Mapping[builtins.str, builtins.str] | None = ...) -> None: ...
            def ClearField(self, field_name: typing_extensions.Literal["assigner_args", b"assigner_args", "assigner_cls_path", b"assigner_cls_path", "split_strategy_args", b"split_strategy_args", "split_strategy_cls_path", b"split_strategy_cls_path"]) -> None: ...
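
        # Illustrative usage sketch (not part of the generated stub): wiring a split strategy
        # and assigner. The class paths and argument names below are hypothetical.
        #
        #   from snapchat.research.gbml import gbml_config_pb2
        #
        #   split_gen = gbml_config_pb2.GbmlConfig.DatasetConfig.SplitGeneratorConfig(
        #       split_strategy_cls_path="my_project.splits.MySplitStrategy",
        #       split_strategy_args={"some_arg": "some_value"},
        #       assigner_cls_path="my_project.splits.MyAssigner",
        #       assigner_args={"train_split": "0.8", "val_split": "0.1", "test_split": "0.1"},
        #   )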

        DATA_PREPROCESSOR_CONFIG_FIELD_NUMBER: builtins.int
        SUBGRAPH_SAMPLER_CONFIG_FIELD_NUMBER: builtins.int
        SPLIT_GENERATOR_CONFIG_FIELD_NUMBER: builtins.int
        @property
        def data_preprocessor_config(self) -> global___GbmlConfig.DatasetConfig.DataPreprocessorConfig: ...
        @property
        def subgraph_sampler_config(self) -> global___GbmlConfig.DatasetConfig.SubgraphSamplerConfig: ...
        @property
        def split_generator_config(self) -> global___GbmlConfig.DatasetConfig.SplitGeneratorConfig: ...
        def __init__(self, *, data_preprocessor_config: global___GbmlConfig.DatasetConfig.DataPreprocessorConfig | None = ..., subgraph_sampler_config: global___GbmlConfig.DatasetConfig.SubgraphSamplerConfig | None = ..., split_generator_config: global___GbmlConfig.DatasetConfig.SplitGeneratorConfig | None = ...) -> None: ...
        def HasField(self, field_name: typing_extensions.Literal["data_preprocessor_config", b"data_preprocessor_config", "split_generator_config", b"split_generator_config", "subgraph_sampler_config", b"subgraph_sampler_config"]) -> builtins.bool: ...
        def ClearField(self, field_name: typing_extensions.Literal["data_preprocessor_config", b"data_preprocessor_config", "split_generator_config", b"split_generator_config", "subgraph_sampler_config", b"subgraph_sampler_config"]) -> None: ...
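
    # Illustrative usage sketch (not part of the generated stub): composing the three
    # sub-configs. The names `preprocessor`, `sampler`, and `split_gen` refer to the
    # hypothetical objects built in the sketches above.
    #
    #   dataset = gbml_config_pb2.GbmlConfig.DatasetConfig(
    #       data_preprocessor_config=preprocessor,
    #       subgraph_sampler_config=sampler,
    #       split_generator_config=split_gen,
    #   )
    #   assert dataset.HasField("subgraph_sampler_config")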

    class GraphDBConfig(google.protobuf.message.Message):
        """Generic configuration for a GraphDB connection."""

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        class GraphDbIngestionArgsEntry(google.protobuf.message.Message):
            DESCRIPTOR: google.protobuf.descriptor.Descriptor

            KEY_FIELD_NUMBER: builtins.int
            VALUE_FIELD_NUMBER: builtins.int
            key: builtins.str
            value: builtins.str
            def __init__(self, *, key: builtins.str = ..., value: builtins.str = ...) -> None: ...
            def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...

        class GraphDbArgsEntry(google.protobuf.message.Message):
            DESCRIPTOR: google.protobuf.descriptor.Descriptor

            KEY_FIELD_NUMBER: builtins.int
            VALUE_FIELD_NUMBER: builtins.int
            key: builtins.str
            value: builtins.str
            def __init__(self, *, key: builtins.str = ..., value: builtins.str = ...) -> None: ...
            def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...

        class GraphDBServiceConfig(google.protobuf.message.Message):
            """Scala-specific configuration."""

            DESCRIPTOR: google.protobuf.descriptor.Descriptor

            GRAPH_DB_CLIENT_CLASS_PATH_FIELD_NUMBER: builtins.int
            graph_db_client_class_path: builtins.str
            """Scala absolute class path pointing to an implementation of `DBClient[DBResult]`,
            e.g. `my.team.graph_db.DBClient`.
            """
            def __init__(self, *, graph_db_client_class_path: builtins.str = ...) -> None: ...
            def ClearField(self, field_name: typing_extensions.Literal["graph_db_client_class_path", b"graph_db_client_class_path"]) -> None: ...

        GRAPH_DB_INGESTION_CLS_PATH_FIELD_NUMBER: builtins.int
        GRAPH_DB_INGESTION_ARGS_FIELD_NUMBER: builtins.int
        GRAPH_DB_ARGS_FIELD_NUMBER: builtins.int
        GRAPH_DB_SAMPLER_CONFIG_FIELD_NUMBER: builtins.int
        graph_db_ingestion_cls_path: builtins.str
        """Python class path pointing to a user-written `BaseIngestion` class definition,
        e.g. `my.team.graph_db.BaseInjectionImpl`. This class is currently, as an
        implementation detail, used for ingestion only. We document this *purely* for
        informational purposes and may change the implementation at any time.
        """
        @property
        def graph_db_ingestion_args(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
            """Arguments to instantiate concrete BaseIngestion instance with."""
        @property
        def graph_db_args(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
            """General arguments required for graphDB (graph space, port, etc.).
            These are passed to both the Python and Scala implementations.
            """
        @property
        def graph_db_sampler_config(self) -> global___GbmlConfig.GraphDBConfig.GraphDBServiceConfig:
            """If provided, an implementation of a `DBClient[DBResult]` Scala class for a GraphDB.
            Intended to be used to inject specific implementations at runtime. The object
            constructed from this is currently, as an implementation detail, used for sampling
            only. We document this *purely* for informational purposes and may change the
            implementation at any time.
            """
        def __init__(self, *, graph_db_ingestion_cls_path: builtins.str = ..., graph_db_ingestion_args: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., graph_db_args: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., graph_db_sampler_config: global___GbmlConfig.GraphDBConfig.GraphDBServiceConfig | None = ...) -> None: ...
        def HasField(self, field_name: typing_extensions.Literal["graph_db_sampler_config", b"graph_db_sampler_config"]) -> builtins.bool: ...
        def ClearField(self, field_name: typing_extensions.Literal["graph_db_args", b"graph_db_args", "graph_db_ingestion_args", b"graph_db_ingestion_args", "graph_db_ingestion_cls_path", b"graph_db_ingestion_cls_path", "graph_db_sampler_config", b"graph_db_sampler_config"]) -> None: ...
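
    # Illustrative usage sketch (not part of the generated stub): a GraphDB connection
    # config. The Python class path and map keys are hypothetical; the Scala class path
    # reuses the example given in the docstring above.
    #
    #   from snapchat.research.gbml import gbml_config_pb2
    #
    #   graph_db = gbml_config_pb2.GbmlConfig.GraphDBConfig(
    #       graph_db_ingestion_cls_path="my_project.graph_db.MyBaseIngestion",
    #       graph_db_ingestion_args={"num_shards": "8"},
    #       graph_db_args={"hosts": "graphdb.internal:9669", "space": "my_graph_space"},
    #       graph_db_sampler_config=gbml_config_pb2.GbmlConfig.GraphDBConfig.GraphDBServiceConfig(
    #           graph_db_client_class_path="my.team.graph_db.DBClient",
    #       ),
    #   )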

    class TrainerConfig(google.protobuf.message.Message):
        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        class TrainerArgsEntry(google.protobuf.message.Message):
            DESCRIPTOR: google.protobuf.descriptor.Descriptor

            KEY_FIELD_NUMBER: builtins.int
            VALUE_FIELD_NUMBER: builtins.int
            key: builtins.str
            value: builtins.str
            def __init__(self, *, key: builtins.str = ..., value: builtins.str = ...) -> None: ...
            def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...

        TRAINER_CLS_PATH_FIELD_NUMBER: builtins.int
        TRAINER_ARGS_FIELD_NUMBER: builtins.int
        CLS_PATH_FIELD_NUMBER: builtins.int
        COMMAND_FIELD_NUMBER: builtins.int
        SHOULD_LOG_TO_TENSORBOARD_FIELD_NUMBER: builtins.int
        trainer_cls_path: builtins.str
        """(deprecated) Uri pointing to user-written BaseTrainer class definition.
        Used for the subgraph-sampling-based training process.
        """
        @property
        def trainer_args(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
            """Arguments to parameterize the training process with."""
        cls_path: builtins.str
        """Path pointing to trainer class definition."""
        command: builtins.str
        """Command to use for launching the trainer job."""
        should_log_to_tensorboard: builtins.bool
        """Whether to log to TensorBoard or not (defaults to false)."""
        def __init__(self, *, trainer_cls_path: builtins.str = ..., trainer_args: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., cls_path: builtins.str = ..., command: builtins.str = ..., should_log_to_tensorboard: builtins.bool = ...) -> None: ...
        def HasField(self, field_name: typing_extensions.Literal["cls_path", b"cls_path", "command", b"command", "executable", b"executable"]) -> builtins.bool: ...
        def ClearField(self, field_name: typing_extensions.Literal["cls_path", b"cls_path", "command", b"command", "executable", b"executable", "should_log_to_tensorboard", b"should_log_to_tensorboard", "trainer_args", b"trainer_args", "trainer_cls_path", b"trainer_cls_path"]) -> None: ...
        def WhichOneof(self, oneof_group: typing_extensions.Literal["executable", b"executable"]) -> typing_extensions.Literal["cls_path", "command"] | None: ...
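
    # Illustrative usage sketch (not part of the generated stub): cls_path and command are
    # members of the `executable` oneof, so setting one clears the other. The class path,
    # command, and trainer_args keys are hypothetical.
    #
    #   from snapchat.research.gbml import gbml_config_pb2
    #
    #   trainer = gbml_config_pb2.GbmlConfig.TrainerConfig(
    #       cls_path="my_project.training.MyTrainer",
    #       trainer_args={"learning_rate": "0.001", "num_epochs": "5"},
    #       should_log_to_tensorboard=True,
    #   )
    #   trainer.command = "python -m my_project.training.run"  # switches the oneof member
    #   assert trainer.WhichOneof("executable") == "command"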

    class InferencerConfig(google.protobuf.message.Message):
        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        class InferencerArgsEntry(google.protobuf.message.Message):
            DESCRIPTOR: google.protobuf.descriptor.Descriptor

            KEY_FIELD_NUMBER: builtins.int
            VALUE_FIELD_NUMBER: builtins.int
            key: builtins.str
            value: builtins.str
            def __init__(self, *, key: builtins.str = ..., value: builtins.str = ...) -> None: ...
            def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...

        INFERENCER_ARGS_FIELD_NUMBER: builtins.int
        INFERENCER_CLS_PATH_FIELD_NUMBER: builtins.int
        CLS_PATH_FIELD_NUMBER: builtins.int
        COMMAND_FIELD_NUMBER: builtins.int
        INFERENCE_BATCH_SIZE_FIELD_NUMBER: builtins.int
        @property
        def inferencer_args(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ...
        inferencer_cls_path: builtins.str
        """(deprecated) Path to the modeling task spec class used to construct the model for
        inference. Used for the subgraph-sampling-based inference process.
        """
        cls_path: builtins.str
        """Path pointing to inferencer class definition."""
        command: builtins.str
        """Command to use for launching the inference job."""
        inference_batch_size: builtins.int
        """Optional. If set, inference samples are batched to this size before each inference
        call is made. Defaults to the setting in python/gigl/src/inference/gnn_inferencer.py.
        """
        def __init__(self, *, inferencer_args: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., inferencer_cls_path: builtins.str = ..., cls_path: builtins.str = ..., command: builtins.str = ..., inference_batch_size: builtins.int = ...) -> None: ...
        def HasField(self, field_name: typing_extensions.Literal["cls_path", b"cls_path", "command", b"command", "executable", b"executable"]) -> builtins.bool: ...
        def ClearField(self, field_name: typing_extensions.Literal["cls_path", b"cls_path", "command", b"command", "executable", b"executable", "inference_batch_size", b"inference_batch_size", "inferencer_args", b"inferencer_args", "inferencer_cls_path", b"inferencer_cls_path"]) -> None: ...
        def WhichOneof(self, oneof_group: typing_extensions.Literal["executable", b"executable"]) -> typing_extensions.Literal["cls_path", "command"] | None: ...
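
    # Illustrative usage sketch (not part of the generated stub): an inferencer launched
    # via a command with an explicit batch size. The command and args are hypothetical.
    #
    #   from snapchat.research.gbml import gbml_config_pb2
    #
    #   inferencer = gbml_config_pb2.GbmlConfig.InferencerConfig(
    #       command="python -m my_project.inference.run",
    #       inferencer_args={"embedding_dim": "128"},
    #       inference_batch_size=512,
    #   )
    #   assert inferencer.WhichOneof("executable") == "command"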

    class PostProcessorConfig(google.protobuf.message.Message):
        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        class PostProcessorArgsEntry(google.protobuf.message.Message):
            DESCRIPTOR: google.protobuf.descriptor.Descriptor

            KEY_FIELD_NUMBER: builtins.int
            VALUE_FIELD_NUMBER: builtins.int
            key: builtins.str
            value: builtins.str
            def __init__(self, *, key: builtins.str = ..., value: builtins.str = ...) -> None: ...
            def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...

        POST_PROCESSOR_ARGS_FIELD_NUMBER: builtins.int
        POST_PROCESSOR_CLS_PATH_FIELD_NUMBER: builtins.int
        @property
        def post_processor_args(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ...
        post_processor_cls_path: builtins.str
        def __init__(self, *, post_processor_args: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., post_processor_cls_path: builtins.str = ...) -> None: ...
        def ClearField(self, field_name: typing_extensions.Literal["post_processor_args", b"post_processor_args", "post_processor_cls_path", b"post_processor_cls_path"]) -> None: ...
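
    # Illustrative usage sketch (not part of the generated stub): hooking in a
    # post-processing step. The class path and args are hypothetical.
    #
    #   from snapchat.research.gbml import gbml_config_pb2
    #
    #   post = gbml_config_pb2.GbmlConfig.PostProcessorConfig(
    #       post_processor_cls_path="my_project.postprocessing.ExportEmbeddings",
    #       post_processor_args={"output_table": "my_project.my_dataset.embeddings"},
    #   )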

    class MetricsConfig(google.protobuf.message.Message):
        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        class MetricsArgsEntry(google.protobuf.message.Message):
            DESCRIPTOR: google.protobuf.descriptor.Descriptor

            KEY_FIELD_NUMBER: builtins.int
            VALUE_FIELD_NUMBER: builtins.int
            key: builtins.str
            value: builtins.str
            def __init__(self, *, key: builtins.str = ..., value: builtins.str = ...) -> None: ...
            def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...

        METRICS_CLS_PATH_FIELD_NUMBER: builtins.int
        METRICS_ARGS_FIELD_NUMBER: builtins.int
        metrics_cls_path: builtins.str
        @property
        def metrics_args(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ...
        def __init__(self, *, metrics_cls_path: builtins.str = ..., metrics_args: collections.abc.Mapping[builtins.str, builtins.str] | None = ...) -> None: ...
        def ClearField(self, field_name: typing_extensions.Literal["metrics_args", b"metrics_args", "metrics_cls_path", b"metrics_cls_path"]) -> None: ...
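
    # Illustrative usage sketch (not part of the generated stub): a metrics-reporting
    # config. The class path and argument keys are hypothetical.
    #
    #   from snapchat.research.gbml import gbml_config_pb2
    #
    #   metrics = gbml_config_pb2.GbmlConfig.MetricsConfig(
    #       metrics_cls_path="my_project.metrics.MyMetricsReporter",
    #       metrics_args={"project": "my-gcp-project"},
    #   )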

    class ProfilerConfig(google.protobuf.message.Message):
        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        class ProfilerArgsEntry(google.protobuf.message.Message):
            DESCRIPTOR: google.protobuf.descriptor.Descriptor

            KEY_FIELD_NUMBER: builtins.int
            VALUE_FIELD_NUMBER: builtins.int
            key: builtins.str
            value: builtins.str
            def __init__(self, *, key: builtins.str = ..., value: builtins.str = ...) -> None: ...
            def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...

        SHOULD_ENABLE_PROFILER_FIELD_NUMBER: builtins.int
        PROFILER_LOG_DIR_FIELD_NUMBER: builtins.int
        PROFILER_ARGS_FIELD_NUMBER: builtins.int
        should_enable_profiler: builtins.bool
        profiler_log_dir: builtins.str
        @property
        def profiler_args(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ...
        def __init__(self, *, should_enable_profiler: builtins.bool = ..., profiler_log_dir: builtins.str = ..., profiler_args: collections.abc.Mapping[builtins.str, builtins.str] | None = ...) -> None: ...
        def ClearField(self, field_name: typing_extensions.Literal["profiler_args", b"profiler_args", "profiler_log_dir", b"profiler_log_dir", "should_enable_profiler", b"should_enable_profiler"]) -> None: ...
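
    # Illustrative usage sketch (not part of the generated stub): enabling the profiler
    # and pointing its logs at a directory. The path and profiler_args keys are hypothetical.
    #
    #   from snapchat.research.gbml import gbml_config_pb2
    #
    #   profiler = gbml_config_pb2.GbmlConfig.ProfilerConfig(
    #       should_enable_profiler=True,
    #       profiler_log_dir="gs://my-bucket/profiler_logs",
    #       profiler_args={"warmup": "1", "active": "3"},
    #   )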

    class FeatureFlagsEntry(google.protobuf.message.Message):
        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        KEY_FIELD_NUMBER: builtins.int
        VALUE_FIELD_NUMBER: builtins.int
        key: builtins.str
        value: builtins.str
        def __init__(self, *, key: builtins.str = ..., value: builtins.str = ...) -> None: ...
        def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...

    TASK_METADATA_FIELD_NUMBER: builtins.int
    GRAPH_METADATA_FIELD_NUMBER: builtins.int
    SHARED_CONFIG_FIELD_NUMBER: builtins.int
    DATASET_CONFIG_FIELD_NUMBER: builtins.int
    TRAINER_CONFIG_FIELD_NUMBER: builtins.int
    INFERENCER_CONFIG_FIELD_NUMBER: builtins.int
    POST_PROCESSOR_CONFIG_FIELD_NUMBER: builtins.int
    METRICS_CONFIG_FIELD_NUMBER: builtins.int
    PROFILER_CONFIG_FIELD_NUMBER: builtins.int
    FEATURE_FLAGS_FIELD_NUMBER: builtins.int
    @property
    def task_metadata(self) -> global___GbmlConfig.TaskMetadata: ...
    @property
    def graph_metadata(self) -> snapchat.research.gbml.graph_schema_pb2.GraphMetadata: ...
    @property
    def shared_config(self) -> global___GbmlConfig.SharedConfig: ...
    @property
    def dataset_config(self) -> global___GbmlConfig.DatasetConfig: ...
    @property
    def trainer_config(self) -> global___GbmlConfig.TrainerConfig: ...
    @property
    def inferencer_config(self) -> global___GbmlConfig.InferencerConfig: ...
    @property
    def post_processor_config(self) -> global___GbmlConfig.PostProcessorConfig: ...
    @property
    def metrics_config(self) -> global___GbmlConfig.MetricsConfig: ...
    @property
    def profiler_config(self) -> global___GbmlConfig.ProfilerConfig: ...
    @property
    def feature_flags(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ...
    def __init__(self, *, task_metadata: global___GbmlConfig.TaskMetadata | None = ..., graph_metadata: snapchat.research.gbml.graph_schema_pb2.GraphMetadata | None = ..., shared_config: global___GbmlConfig.SharedConfig | None = ..., dataset_config: global___GbmlConfig.DatasetConfig | None = ..., trainer_config: global___GbmlConfig.TrainerConfig | None = ..., inferencer_config: global___GbmlConfig.InferencerConfig | None = ..., post_processor_config: global___GbmlConfig.PostProcessorConfig | None = ..., metrics_config: global___GbmlConfig.MetricsConfig | None = ..., profiler_config: global___GbmlConfig.ProfilerConfig | None = ..., feature_flags: collections.abc.Mapping[builtins.str, builtins.str] | None = ...) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal["dataset_config", b"dataset_config", "graph_metadata", b"graph_metadata", "inferencer_config", b"inferencer_config", "metrics_config", b"metrics_config", "post_processor_config", b"post_processor_config", "profiler_config", b"profiler_config", "shared_config", b"shared_config", "task_metadata", b"task_metadata", "trainer_config", b"trainer_config"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing_extensions.Literal["dataset_config", b"dataset_config", "feature_flags", b"feature_flags", "graph_metadata", b"graph_metadata", "inferencer_config", b"inferencer_config", "metrics_config", b"metrics_config", "post_processor_config", b"post_processor_config", "profiler_config", b"profiler_config", "shared_config", b"shared_config", "task_metadata", b"task_metadata", "trainer_config", b"trainer_config"]) -> None: ...

global___GbmlConfig = GbmlConfig
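
# Illustrative usage sketch (not part of the generated stub): assembling a minimal top-level
# GbmlConfig and round-tripping it through text format with the standard protobuf runtime.
# All literal values, including the feature flag key, are hypothetical.
#
#   from google.protobuf import text_format
#   from snapchat.research.gbml import gbml_config_pb2
#
#   config = gbml_config_pb2.GbmlConfig(
#       task_metadata=gbml_config_pb2.GbmlConfig.TaskMetadata(
#           node_based_task_metadata=gbml_config_pb2.GbmlConfig.TaskMetadata.NodeBasedTaskMetadata(
#               supervision_node_types=["user"],
#           )
#       ),
#       shared_config=gbml_config_pb2.GbmlConfig.SharedConfig(
#           preprocessed_metadata_uri="gs://my-bucket/preprocessed_metadata.yaml",
#       ),
#       feature_flags={"some_flag": "True"},
#   )
#   serialized = text_format.MessageToString(config)
#   parsed = text_format.Parse(serialized, gbml_config_pb2.GbmlConfig())
#   assert parsed.task_metadata.WhichOneof("task_metadata") == "node_based_task_metadata"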