Skip to content

Commit

Permalink
Version 1.1.1
Browse files Browse the repository at this point in the history
  • Loading branch information
Brandon Lefore committed Jan 26, 2024
1 parent c411357 commit 24e9f35
Show file tree
Hide file tree
Showing 278 changed files with 4,827 additions and 3,587 deletions.
2 changes: 1 addition & 1 deletion abacusai/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,4 @@
from .streaming_client import StreamingClient


__version__ = "1.1.0"
__version__ = "1.1.1"
30 changes: 25 additions & 5 deletions abacusai/api_class/abstract.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import inspect
import re
from abc import ABC
from copy import deepcopy
from typing import Any

from .enums import ApiEnum
Expand Down Expand Up @@ -137,7 +138,21 @@ def to_dict_helper(api_class_obj):
def from_dict(cls, input_dict: dict):
if input_dict:
if builder := cls._get_builder():
return builder.from_dict(input_dict)
config_class_key = None
value = next((key for key, val in builder.config_class_map.items() if val.__name__ == cls.__name__), None)
input_dict_with_config_key = input_dict
if value is not None:
input_dict_with_config_key = deepcopy(input_dict)
if builder.config_abstract_class._upper_snake_case_keys:
config_class_key = upper_snake_case(builder.config_class_key)
if config_class_key not in input_dict_with_config_key:
input_dict_with_config_key[config_class_key] = value
else:
config_class_key = builder.config_class_key
if config_class_key not in input_dict_with_config_key and camel_case(config_class_key) not in input_dict_with_config_key:
input_dict_with_config_key[config_class_key] = value

return builder.from_dict(input_dict_with_config_key)
if not cls._upper_snake_case_keys:
input_dict = {snake_case(k): v for k, v in input_dict.items()}
if not cls._support_kwargs:
Expand All @@ -156,24 +171,29 @@ class _ApiClassFactory(ABC):
@classmethod
def from_dict(cls, config: dict) -> ApiClass:
support_kwargs = cls.config_abstract_class and cls.config_abstract_class._support_kwargs
not_upper_snake_case_keys = cls.config_abstract_class and not cls.config_abstract_class._upper_snake_case_keys
config_class_key = cls.config_class_key if (not_upper_snake_case_keys) else camel_case(cls.config_class_key)
if not_upper_snake_case_keys and config_class_key not in config and camel_case(config_class_key) in config:
is_upper_snake_case_keys = cls.config_abstract_class and cls.config_abstract_class._upper_snake_case_keys
config_class_key = upper_snake_case(cls.config_class_key) if is_upper_snake_case_keys else cls.config_class_key
# Logic here is that we keep the config_class_key in snake_case if _upper_snake_case_keys is False else we convert it to upper_snake_case
# if _upper_snake_case_keys is False then we check in both casing: 1. snake_case and 2. camel_case
if not is_upper_snake_case_keys and config_class_key not in config and camel_case(config_class_key) in config:
config_class_key = camel_case(config_class_key)

if not support_kwargs and config_class_key not in (config or {}):
raise KeyError(f'Could not find {config_class_key} in {config}')
config_class_type = config.get(config_class_key, None)

if isinstance(config_class_type, str):
config_class_type = config_class_type.upper()
config_class = cls.config_class_map.get(config_class_type)

if support_kwargs:
if config_class:
field_names = set((field.name) for field in dataclasses.fields(config_class))
trimmed_config = {}
kwargs = {}
for k, v in config.items():
if snake_case(k) in field_names:
trimmed_config[k] = v
trimmed_config[snake_case(k)] = v
else:
kwargs[k] = v
if len(kwargs):
Expand Down
11 changes: 11 additions & 0 deletions abacusai/api_class/dataset_application_connector.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,16 @@ def __post_init__(self):
self.application_connector_type = enums.ApplicationConnectorType.ZENDESK


@dataclasses.dataclass
class AbacusUsageMetricsDatasetConfig(DatasetConfig):
    """
    Dataset config for Abacus Usage Metrics Application Connector
    """

    def __post_init__(self):
        # Tag this config with its connector type so the dataset config
        # factory can dispatch to this class via its config_class_map.
        self.application_connector_type = enums.ApplicationConnectorType.ABACUSUSAGEMETRICS


@dataclasses.dataclass
class _DatasetConfigFactory(_ApiClassFactory):
config_abstract_class = DatasetConfig
Expand All @@ -154,4 +164,5 @@ class _DatasetConfigFactory(_ApiClassFactory):
enums.ApplicationConnectorType.ONEDRIVE: OneDriveDatasetConfig,
enums.ApplicationConnectorType.SHAREPOINT: SharepointDatasetConfig,
enums.ApplicationConnectorType.ZENDESK: ZendeskDatasetConfig,
enums.ApplicationConnectorType.ABACUSUSAGEMETRICS: AbacusUsageMetricsDatasetConfig,
}
12 changes: 12 additions & 0 deletions abacusai/api_class/enums.py
Original file line number Diff line number Diff line change
Expand Up @@ -282,6 +282,17 @@ class ForecastingQuanitlesExtensionMethod(ApiEnum):
ANCESTRAL_SIMULATION = 'simulation'


# Timeseries Anomaly Detection
class TimeseriesAnomalyDataSplitType(ApiEnum):
    """How to split timeseries anomaly detection data into train/test sets."""
    AUTO = 'Automatic Time Based'        # split point chosen automatically from the time range
    TIMESTAMP = 'Fixed Timestamp Based'  # split at a fixed, user-supplied timestamp (presumably the config's test_start — TODO confirm)


class TimeseriesAnomalyTypeOfAnomaly(ApiEnum):
    """Kind of peaks to detect as anomalies (used by TimeseriesAnomalyTrainingConfig.anomaly_type)."""
    HIGH_PEAK = 'high_peak'  # flag anomalously high values
    LOW_PEAK = 'low_peak'    # flag anomalously low values


# Named Entity Recognition
class NERObjective(ApiEnum):
LOG_LOSS = 'log_loss'
Expand Down Expand Up @@ -338,6 +349,7 @@ class ApplicationConnectorType(ApiEnum):
SLACK = 'SLACK'
SHAREPOINT = 'SHAREPOINT'
TEAMS = 'TEAMS'
ABACUSUSAGEMETRICS = 'ABACUSUSAGEMETRICS'


class PythonFunctionArgumentType(ApiEnum):
Expand Down
15 changes: 12 additions & 3 deletions abacusai/api_class/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -620,14 +620,23 @@ class TimeseriesAnomalyTrainingConfig(TrainingConfig):
Training config for the TS_ANOMALY problem type
Args:
type_of_split: Type of data splitting into train/test.
type_of_split (TimeseriesAnomalyDataSplitType): Type of data splitting into train/test.
test_start (str): Limit training data to dates before the given test start.
test_split (int): Percent of dataset to use for test data. We support using a range between 5 ( i.e. 5% ) to 20 ( i.e. 20% ) of your dataset.
fill_missing_values (List[dict]): strategies to fill missing values and missing timestamps
handle_zeros_as_missing_values (bool): If True, handle zero values in numeric columns as missing data
timeseries_frequency (str): set this to control frequency of filling missing values
min_samples_in_normal_region (int): Adjust this to fine-tune the number of anomalies to be identified.
anomaly_type (TimeseriesAnomalyTypeOfAnomaly): select what kind of peaks to detect as anomalies
"""
type_of_split: str = dataclasses.field(default=None)
type_of_split: enums.TimeseriesAnomalyDataSplitType = dataclasses.field(default=None)
test_start: str = dataclasses.field(default=None)
test_split: int = dataclasses.field(default=None)
fill_missing_values: List[dict] = dataclasses.field(default=None)
handle_zeros_as_missing_values: bool = dataclasses.field(default=None)
timeseries_frequency: str = dataclasses.field(default=None)
min_samples_in_normal_region: int = dataclasses.field(default=None)
anomaly_type: enums.TimeseriesAnomalyTypeOfAnomaly = dataclasses.field(default=None)

def __post_init__(self):
self.problem_type = enums.ProblemType.TS_ANOMALY
Expand Down
1 change: 0 additions & 1 deletion abacusai/api_class/refresh.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,6 @@ def to_dict(self):

@dataclasses.dataclass
class DatabaseConnectorExportConfig(FeatureGroupExportConfig):
connector_type: enums.ConnectorType = dataclasses.field(default=enums.ConnectorType.DATABASE, repr=False)
database_connector_id: str = dataclasses.field(default=None)
mode: str = dataclasses.field(default=None)
object_name: str = dataclasses.field(default=None)
Expand Down
2 changes: 1 addition & 1 deletion abacusai/application_connector.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ def to_dict(self):
'name': self.name, 'created_at': self.created_at, 'status': self.status, 'auth': self.auth}
return {key: value for key, value in resp.items() if value is not None}

def rename(self, name: str):
def rename(self, name: str = None):
"""
Renames an Application Connector
Expand Down
8 changes: 4 additions & 4 deletions abacusai/batch_prediction.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ class BatchPrediction(AbstractApiClass):
fileOutputFormat (str): The format of the batch prediction output (CSV or JSON).
connectorType (str): Null if writing to internal console, else FEATURE_GROUP | FILE_CONNECTOR | DATABASE_CONNECTOR.
legacyInputLocation (str): The location of the input data.
featureGroupTableName (str): The table name of the Batch Prediction feature group.
featureGroupTableName (str): The table name of the Batch Prediction output feature group.
summaryFeatureGroupTableName (str): The table name of the metrics summary feature group output by Batch Prediction.
csvInputPrefix (str): A prefix to prepend to the input columns, only applies when output format is CSV.
csvPredictionPrefix (str): A prefix to prepend to the prediction columns, only applies when output format is CSV.
Expand Down Expand Up @@ -183,7 +183,7 @@ def set_database_connector_output(self, database_connector_id: str = None, datab
"""
return self.client.set_batch_prediction_database_connector_output(self.batch_prediction_id, database_connector_id, database_output_config)

def set_feature_group_output(self, table_name: str):
def set_feature_group_output(self, table_name: str = None):
"""
Creates a feature group and sets it as the batch prediction output.
Expand All @@ -207,7 +207,7 @@ def set_output_to_console(self):
"""
return self.client.set_batch_prediction_output_to_console(self.batch_prediction_id)

def set_feature_group(self, feature_group_type: str, feature_group_id: str = None):
def set_feature_group(self, feature_group_type: str = None, feature_group_id: str = None):
"""
Sets the batch prediction input feature group.
Expand All @@ -220,7 +220,7 @@ def set_feature_group(self, feature_group_type: str, feature_group_id: str = Non
"""
return self.client.set_batch_prediction_feature_group(self.batch_prediction_id, feature_group_type, feature_group_id)

def set_dataset_remap(self, dataset_id_remap: dict):
def set_dataset_remap(self, dataset_id_remap: dict = None):
"""
For the purpose of this batch prediction, will swap out datasets in the training feature groups
Expand Down
4 changes: 2 additions & 2 deletions abacusai/chat_session.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ def get(self):
"""
return self.client.get_chat_session(self.chat_session_id)

def delete_chat_message(self, message_index: int):
def delete_chat_message(self, message_index: int = None):
"""
Deletes a message in a chat session and its associated response.
Expand All @@ -84,7 +84,7 @@ def export(self):
"""
return self.client.export_chat_session(self.chat_session_id)

def rename(self, name: str):
def rename(self, name: str = None):
"""
Renames a chat session with Data Science Co-pilot.
Expand Down
Loading

0 comments on commit 24e9f35

Please sign in to comment.