REST API Types

The following classes are used as API parameter and response types for the SynthAPI.

Jobs

class hazy_configurator.api_types.jobs.DispatchTaskState(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)
WAITING = 'WAITING'
PROCESSING = 'PROCESSING'
SUCCEEDED = 'SUCCEEDED'
FAILED = 'FAILED'
KILLED = 'KILLED'
property is_finished: bool
property is_busy: bool
classmethod get_finished_states() List[DispatchTaskState]
classmethod get_busy_states() List[DispatchTaskState]
class hazy_configurator.api_types.jobs.DestinationType(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)
DOWNLOAD = 'DOWNLOAD'
DISK = 'DISK'
S3 = 'S3'
GCS = 'GCS'
DATABASE = 'DATABASE'
AZURE = 'AZURE'
GBQ = 'GBQ'
class hazy_configurator.api_types.jobs.DispatchTaskProgress
Fields:
  • current_process_unit (int)

  • message (Optional[str])

  • total_process_units (int)

field current_process_unit: int = 0
field total_process_units: int [Required]
field message: Optional[str] = None
class hazy_configurator.api_types.jobs.TrainJobDetails
Fields:
field model_name: str [Required]
field model_id: UUID [Required]
field show_url: str [Required]
field dispatch_task_id: int [Required]
class hazy_configurator.api_types.jobs.GenerateJobDetails
Fields:
field task_id: int [Required]
field run_id: UUID [Required]
field model_id: UUID [Required]
field show_url: str [Required]
class hazy_configurator.api_types.jobs.GenerateJob
Fields:
field run_id: UUID [Required]
field model_id: UUID [Required]
field config: GenerationConfig [Required]
field created_at: datetime [Required]
field finished_at: Optional[datetime] = None
field progress: Optional[DispatchTaskProgress] = None
field state: DispatchTaskState [Required]
field destination_type: Optional[DestinationType] = None
field destination: str [Required]
field image: Optional[str] = None
class hazy_configurator.api_types.jobs.TrainJob
Fields:
field created_at: datetime [Required]
field finished_at: Optional[datetime] = None
field progress: Optional[DispatchTaskProgress] = None
field state: DispatchTaskState [Required]

Models

class hazy_configurator.api_types.models.Model
Fields:
field id: str [Required]
field name: str [Required]
field description: Optional[str] = None
field project_id: int [Required]
field generation_schema_version: SchemaVersion [Required]
field is_archived: bool [Required]
class hazy_configurator.api_types.models.TrainMetadata
Fields:
  • adjacency_type (hazy_configurator.base.enums.AdjacencyType)

  • tables (List[hazy_configurator.general_params.train_data_metadata.TrainTableMetadata])

field tables: List[TrainTableMetadata] [Required]
field adjacency_type: AdjacencyType [Required]
class hazy_configurator.api_types.models.TrainTableMetadata
Fields:
  • is_normal_table (bool)

  • row_count (int)

  • table_name (str)

  • table_type (hazy_configurator.base.enums.TableType)

field table_name: str [Required]
field table_type: TableType [Required]
field row_count: int [Required]
field is_normal_table: bool = False
class hazy_configurator.api_types.models.AdjacencyType(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)
IDENTITY = 'identity'
RANDOM = 'random'
DEGREE_PRESERVING = 'degree_preserving'
COMPONENT_PRESERVING = 'component_preserving'
class hazy_configurator.api_types.models.TableType(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)
TABULAR = 'tabular'
SEQUENTIAL = 'sequential'
REFERENCE = 'reference'

Projects

class hazy_configurator.api_types.projects.Project
Fields:
field id: int [Required]
field name: str [Required]
field description: Optional[str] = None
field is_archived: bool [Required]

Data Sources

class hazy_configurator.api_types.data_sources.DataSourceIO(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)
INPUT = 'input'
OUTPUT = 'output'
INPUT_OUTPUT = 'input_output'
UPLOAD = 'upload'
DOWNLOAD = 'download'
class hazy_configurator.api_types.data_sources.SensitiveDataSourceType(value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)

The type of the data source connection.

S3 = 's3_sensitive'
GCS = 'gcs_sensitive'
GBQ = 'gbq_sensitive'
DATABASE = 'database_sensitive'
DISK = 'disk_sensitive'
AZURE = 'azure_sensitive'
DB2 = 'db2_sensitive'
MSSQL = 'mssql_sensitive'
SNOWFLAKE = 'snowflake_sensitive'
SQLITE = 'sqlite_sensitive'
DATABRICKS = 'databricks_sensitive'
POSTGRES = 'postgres_sensitive'
ORACLE = 'oracle_sensitive'
classmethod can_list_files() Set[SensitiveDataSourceType]
classmethod from_path(path: str) SensitiveDataSourceType
class hazy_configurator.api_types.data_sources.SecretDataSource

Used to reference encrypted data source credentials stored by Hazy.

Example

from hazy_configurator import SecretDataSource, SensitiveDataSourceType

data_source = SecretDataSource(
    id="eee3537f-9ea5-4e8a-af03-af6526fef730",
    name="Input bucket 0",
    source_type=SensitiveDataSourceType.S3
)
Fields:
field id: UUID [Required]

UUID of connection/data source.

field name: Optional[str] = None

Human readable label for the connection/data source.

field source_type: Optional[SensitiveDataSourceType] = None
field io: Optional[DataSourceIO] = None