Version 1.4.22
quantumfusion committed Dec 5, 2024
1 parent 88a57fd commit 39bd61e
Showing 498 changed files with 15,668 additions and 4,983 deletions.
3 changes: 2 additions & 1 deletion abacusai/__init__.py
@@ -26,6 +26,7 @@
from .categorical_range_violation import CategoricalRangeViolation
from .chat_message import ChatMessage
from .chat_session import ChatSession
from .chatllm_computer import ChatllmComputer
from .chatllm_referral_invite import ChatllmReferralInvite
from .client import AgentResponse, ApiClient, ApiException, ClientOptions, ReadOnlyClient, _request_context
from .code_autocomplete_response import CodeAutocompleteResponse
@@ -225,4 +226,4 @@
from .workflow_node_template import WorkflowNodeTemplate


__version__ = "1.4.21"
__version__ = "1.4.22"
63 changes: 49 additions & 14 deletions abacusai/api_class/ai_agents.py
@@ -1,6 +1,6 @@
import ast
import dataclasses
from typing import Any, Dict, List, Union
from typing import Dict, List, Union

from . import enums
from .abstract import ApiClass, get_clean_function_source_code_for_agent, validate_constructor_arg_types
@@ -55,7 +55,7 @@ class WorkflowNodeInputMapping(ApiClass):
Args:
name (str): The name of the input variable of the node function.
variable_type (WorkflowNodeInputType): The type of the input.
variable_type (Union[WorkflowNodeInputType, str]): The type of the input. If the type is `IGNORE`, the input will be ignored.
variable_source (str): The name of the node this variable is sourced from.
If the type is `WORKFLOW_VARIABLE`, the value given by the source node will be directly used.
If the type is `USER_INPUT`, the value given by the source node will be used as the default initial value before the user edits it.
@@ -67,7 +67,12 @@ class WorkflowNodeInputMapping(ApiClass):
variable_source: str = dataclasses.field(default=None)
source_prop: str = dataclasses.field(default=None)
is_required: bool = dataclasses.field(default=True)
default_value: Any = dataclasses.field(default=None)

def __post_init__(self):
if self.variable_type == enums.WorkflowNodeInputType.IGNORE and self.is_required:
raise ValueError('input_mapping', 'Invalid input mapping. The variable type cannot be IGNORE if is_required is True.')
if isinstance(self.variable_type, str):
self.variable_type = enums.WorkflowNodeInputType(self.variable_type)

def to_dict(self):
return {
@@ -76,7 +81,6 @@ def to_dict(self):
'variable_source': self.variable_source,
'source_prop': self.source_prop or self.name,
'is_required': self.is_required,
'default_value': self.default_value
}

@classmethod
@@ -90,7 +94,6 @@ def from_dict(cls, mapping: dict):
variable_source=mapping.get('variable_source'),
source_prop=mapping.get('source_prop') or mapping['name'] if mapping.get('variable_source') else None,
is_required=mapping.get('is_required', True),
default_value=mapping.get('default_value')
)
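
A minimal usage sketch of the two behavioral changes above: string values for variable_type are coerced to the enum in __post_init__, and an IGNORE input can no longer be marked required. The input name is illustrative, and it is assumed the constructor's type validation accepts the string form, as the updated docstring indicates.

from abacusai.api_class import enums
from abacusai.api_class.ai_agents import WorkflowNodeInputMapping

# Plain strings are now coerced to WorkflowNodeInputType in __post_init__.
mapping = WorkflowNodeInputMapping(name='debug_info', variable_type='IGNORE', is_required=False)
assert mapping.variable_type == enums.WorkflowNodeInputType.IGNORE

# An IGNORE input that is still required (the default) raises immediately.
try:
    WorkflowNodeInputMapping(name='debug_info', variable_type=enums.WorkflowNodeInputType.IGNORE)
except ValueError as error:
    print(error)  # ('input_mapping', 'Invalid input mapping. ...')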


@@ -219,6 +222,30 @@ def from_dict(cls, schema: dict):
)


@validate_constructor_arg_types('trigger_config')
@dataclasses.dataclass
class TriggerConfig(ApiClass):
"""
Represents the configuration for a trigger workflow node.
Args:
sleep_time (int): The time in seconds to wait before the node gets executed again.
"""
sleep_time: int = dataclasses.field(default=None)

def to_dict(self):
return {
'sleep_time': self.sleep_time
}

@classmethod
def from_dict(cls, configs: dict):
validate_input_dict_param(configs, friendly_class_name='trigger_config')
return cls(
sleep_time=configs.get('sleep_time', None)
)
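
A short, hedged sketch of the new TriggerConfig round-trip, using only what the definition above shows; the sleep value is illustrative.

from abacusai.api_class.ai_agents import TriggerConfig

config = TriggerConfig(sleep_time=600)  # re-run the trigger node every 10 minutes
assert config.to_dict() == {'sleep_time': 600}

# from_dict mirrors to_dict, so a serialized config restores cleanly.
restored = TriggerConfig.from_dict(config.to_dict())
assert restored.sleep_time == 600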


@validate_constructor_arg_types('workflow_graph_node')
@dataclasses.dataclass
class WorkflowGraphNode(ApiClass):
@@ -236,10 +263,12 @@ class WorkflowGraphNode(ApiClass):
Additional Attributes:
function_name (str): The name of the function.
source_code (str): The source code of the function.
trigger_config (TriggerConfig): The configuration for a trigger workflow node.
"""

def __init__(self, name: str, input_mappings: Union[Dict[str, WorkflowNodeInputMapping], List[WorkflowNodeInputMapping]] = None, output_mappings: Union[List[str], Dict[str, str], List[WorkflowNodeOutputMapping]] = None, function: callable = None, function_name: str = None, source_code: str = None, input_schema: Union[List[str], WorkflowNodeInputSchema] = None, output_schema: Union[List[str], WorkflowNodeOutputSchema] = None, template_metadata: dict = None):
def __init__(self, name: str, input_mappings: Union[Dict[str, WorkflowNodeInputMapping], List[WorkflowNodeInputMapping]] = None, output_mappings: Union[List[str], Dict[str, str], List[WorkflowNodeOutputMapping]] = None, function: callable = None, function_name: str = None, source_code: str = None, input_schema: Union[List[str], WorkflowNodeInputSchema] = None, output_schema: Union[List[str], WorkflowNodeOutputSchema] = None, template_metadata: dict = None, trigger_config: TriggerConfig = None):
self.template_metadata = template_metadata
self.trigger_config = trigger_config
if self.template_metadata and not self.template_metadata.get('initialized'):
self.name = name
self.function_name = None
@@ -286,14 +315,14 @@ def __init__(self, name: str, input_mappings: Union[Dict[str, WorkflowNodeInputM
raise ValueError('workflow_graph_node', f'Invalid input mapping. Argument "{input_name}" not found in function "{self.function_name}".')
for arg, default in arg_defaults.items():
if arg not in input_mapping_args:
self.input_mappings.append(WorkflowNodeInputMapping(name=arg, variable_type=enums.WorkflowNodeInputType.USER_INPUT, is_required=default is None, default_value=default.value if default else None))
self.input_mappings.append(WorkflowNodeInputMapping(name=arg, variable_type=enums.WorkflowNodeInputType.USER_INPUT, is_required=default is None))
elif isinstance(input_mappings, Dict) and all(isinstance(key, str) and isinstance(value, WorkflowNodeInputMapping) for key, value in input_mappings.items()):
is_shortform_input_mappings = True
self.input_mappings = [WorkflowNodeInputMapping(name=arg, variable_type=enums.WorkflowNodeInputType.USER_INPUT, is_required=default is None, default_value=default.value if default else None) for arg, default in arg_defaults.items() if arg not in input_mappings]
self.input_mappings = [WorkflowNodeInputMapping(name=arg, variable_type=enums.WorkflowNodeInputType.USER_INPUT, is_required=default is None) for arg, default in arg_defaults.items() if arg not in input_mappings]
for key, value in input_mappings.items():
if key not in arg_defaults:
raise ValueError('workflow_graph_node', f'Invalid input mapping. Argument "{key}" not found in function "{self.function_name}".')
self.input_mappings.append(WorkflowNodeInputMapping(name=key, variable_type=value.variable_type, variable_source=value.variable_source, source_prop=value.source_prop, is_required=arg_defaults.get(key) is None, default_value=value.default_value))
self.input_mappings.append(WorkflowNodeInputMapping(name=key, variable_type=value.variable_type, variable_source=value.variable_source, source_prop=value.source_prop, is_required=arg_defaults.get(key) is None))
else:
raise ValueError('workflow_graph_node', 'Invalid input mappings. Must be a list of WorkflowNodeInputMapping or a dictionary of input mappings in the form {arg_name: node_name.outputs.prop_name}.')

@@ -336,8 +365,8 @@ def __init__(self, name: str, input_mappings: Union[Dict[str, WorkflowNodeInputM
raise ValueError('workflow_graph_node', 'Invalid output schema. Must be a WorkflowNodeOutputSchema or a list of output section names.')

@classmethod
def _raw_init(cls, name: str, input_mappings: List[WorkflowNodeInputMapping] = None, output_mappings: List[WorkflowNodeOutputMapping] = None, function: callable = None, function_name: str = None, source_code: str = None, input_schema: WorkflowNodeInputSchema = None, output_schema: WorkflowNodeOutputSchema = None, template_metadata: dict = None):
workflow_node = cls.__new__(cls, name, input_mappings, output_mappings, input_schema, output_schema, template_metadata)
def _raw_init(cls, name: str, input_mappings: List[WorkflowNodeInputMapping] = None, output_mappings: List[WorkflowNodeOutputMapping] = None, function: callable = None, function_name: str = None, source_code: str = None, input_schema: WorkflowNodeInputSchema = None, output_schema: WorkflowNodeOutputSchema = None, template_metadata: dict = None, trigger_config: TriggerConfig = None):
workflow_node = cls.__new__(cls, name, input_mappings, output_mappings, input_schema, output_schema, template_metadata, trigger_config)
workflow_node.name = name
if function:
workflow_node.function = function
@@ -353,6 +382,7 @@ def _raw_init(cls, name: str, input_mappings: List[WorkflowNodeInputMapping] = N
workflow_node.input_schema = input_schema
workflow_node.output_schema = output_schema
workflow_node.template_metadata = template_metadata
workflow_node.trigger_config = trigger_config
return workflow_node

@classmethod
@@ -362,7 +392,7 @@ def from_template(cls, template_name: str, name: str, configs: dict = None, inpu
if isinstance(input_mappings, List) and all(isinstance(input, WorkflowNodeInputMapping) for input in input_mappings):
instance_input_mappings = input_mappings
elif isinstance(input_mappings, Dict) and all(isinstance(key, str) and isinstance(value, WorkflowNodeInputMapping) for key, value in input_mappings.items()):
instance_input_mappings = [WorkflowNodeInputMapping(name=arg, variable_type=mapping.variable_type, variable_source=mapping.variable_source, source_prop=mapping.source_prop, is_required=mapping.is_required, default_value=mapping.default_value) for arg, mapping in input_mappings]
instance_input_mappings = [WorkflowNodeInputMapping(name=arg, variable_type=mapping.variable_type, variable_source=mapping.variable_source, source_prop=mapping.source_prop, is_required=mapping.is_required) for arg, mapping in input_mappings]
elif input_mappings is None:
instance_input_mappings = []
else:
@@ -410,13 +440,17 @@ def to_dict(self):
'output_mappings': [mapping.to_dict() for mapping in self.output_mappings],
'input_schema': self.input_schema.to_dict(),
'output_schema': self.output_schema.to_dict(),
'template_metadata': self.template_metadata
'template_metadata': self.template_metadata,
'trigger_config': self.trigger_config.to_dict() if self.trigger_config else None
}

@classmethod
def from_dict(cls, node: dict):
validate_input_dict_param(node, friendly_class_name='workflow_graph_node', must_contain=['name', 'function_name', 'source_code'])
_cls = cls._raw_init if node.get('__return_filter') else cls
if node.get('template_metadata') and node.get('template_metadata').get('template_type') == 'trigger':
if not node.get('trigger_config'):
node['trigger_config'] = {'sleep_time': node.get('template_metadata').get('sleep_time')}
instance = _cls(
name=node['name'],
function_name=node['function_name'],
@@ -425,7 +459,8 @@ def from_dict(cls, node: dict):
output_mappings=[WorkflowNodeOutputMapping.from_dict(mapping) for mapping in node.get('output_mappings', [])],
input_schema=WorkflowNodeInputSchema.from_dict(node.get('input_schema', {})),
output_schema=WorkflowNodeOutputSchema.from_dict(node.get('output_schema', {})),
template_metadata=node.get('template_metadata')
template_metadata=node.get('template_metadata'),
trigger_config=TriggerConfig.from_dict(node.get('trigger_config')) if node.get('trigger_config') else None
)
return instance
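
A hedged end-to-end sketch of the new trigger_config parameter. The node name, function body, and sleep value are illustrative, and parts of the constructor elided from this diff (handling of empty input mappings and default schemas) are assumed to behave as in prior releases.

from abacusai.api_class.ai_agents import TriggerConfig, WorkflowGraphNode

def poll_inbox():
    # Hypothetical zero-argument trigger function.
    return {'messages': []}

node = WorkflowGraphNode(
    name='poll_inbox',
    function=poll_inbox,
    input_mappings={},
    output_mappings=['messages'],
    trigger_config=TriggerConfig(sleep_time=300),  # re-run every 5 minutes
)
# to_dict now serializes the trigger configuration alongside the node.
print(node.to_dict()['trigger_config'])  # expected: {'sleep_time': 300}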

7 changes: 5 additions & 2 deletions abacusai/api_class/dataset.py
@@ -59,8 +59,11 @@ class DocumentProcessingConfig(ApiClass):

def __post_init__(self):
self.ocr_mode = self._detect_ocr_mode()
if self.document_type is not None and DocumentType.is_ocr_forced(self.document_type):
self.highlight_relevant_text = True
if self.document_type is not None:
if DocumentType.is_ocr_forced(self.document_type):
self.highlight_relevant_text = True
else:
self.highlight_relevant_text = False
if self.highlight_relevant_text is not None:
self.extract_bounding_boxes = self.highlight_relevant_text # Highlight_relevant text acts as a wrapper over extract_bounding_boxes

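A hedged sketch of the revised __post_init__ logic above, assuming DocumentType.TEXT exists in enums and is not one of the OCR-forced types; non-OCR-forced document types now explicitly disable highlighting instead of leaving it unchanged.

from abacusai.api_class.dataset import DocumentProcessingConfig
from abacusai.api_class.enums import DocumentType

# Even an explicit True is overridden for a non-OCR-forced document type,
# and extract_bounding_boxes follows highlight_relevant_text.
cfg = DocumentProcessingConfig(document_type=DocumentType.TEXT, highlight_relevant_text=True)
print(cfg.highlight_relevant_text, cfg.extract_bounding_boxes)  # expected: False False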
14 changes: 14 additions & 0 deletions abacusai/api_class/dataset_application_connector.py
@@ -44,6 +44,19 @@ def __post_init__(self):
self.application_connector_type = enums.ApplicationConnectorType.CONFLUENCE


@dataclasses.dataclass
class BoxDatasetConfig(ApplicationConnectorDatasetConfig):
"""
Dataset config for Box Application Connector
Args:
location (str): The regex location of the files to fetch
"""
location: str = dataclasses.field(default=None)

def __post_init__(self):
self.application_connector_type = enums.ApplicationConnectorType.BOX


@dataclasses.dataclass
class GoogleAnalyticsDatasetConfig(ApplicationConnectorDatasetConfig):
"""
@@ -217,4 +230,5 @@ class _ApplicationConnectorDatasetConfigFactory(_ApiClassFactory):
enums.ApplicationConnectorType.ABACUSUSAGEMETRICS: AbacusUsageMetricsDatasetConfig,
enums.ApplicationConnectorType.FRESHSERVICE: FreshserviceDatasetConfig,
enums.ApplicationConnectorType.TEAMSSCRAPER: TeamsScraperDatasetConfig,
enums.ApplicationConnectorType.BOX: BoxDatasetConfig,
}
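
A minimal, hedged sketch of the new Box connector config; the location pattern is a placeholder.

from abacusai.api_class.dataset_application_connector import BoxDatasetConfig
from abacusai.api_class.enums import ApplicationConnectorType

config = BoxDatasetConfig(location=r'reports/.*\.pdf')
# __post_init__ pins the connector type to the new BOX enum value,
# and the factory above maps BOX back to this class when parsing configs.
assert config.application_connector_type == ApplicationConnectorType.BOX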
3 changes: 3 additions & 0 deletions abacusai/api_class/enums.py
@@ -411,6 +411,7 @@ class ApplicationConnectorType(ApiEnum):
TEAMSSCRAPER = 'TEAMSSCRAPER'
GITHUBUSER = 'GITHUBUSER'
OKTASAML = 'OKTASAML'
BOX = 'BOX'


class StreamingConnectorType(ApiEnum):
@@ -482,6 +483,7 @@ class LLMName(ApiEnum):
ABACUS_SMAUG3 = 'ABACUS_SMAUG3'
ABACUS_DRACARYS = 'ABACUS_DRACARYS'
QWEN_2_5_32B = 'QWEN_2_5_32B'
QWQ_32B = 'QWQ_32B'
GEMINI_1_5_FLASH = 'GEMINI_1_5_FLASH'
XAI_GROK = 'XAI_GROK'

@@ -549,6 +551,7 @@ class WorkflowNodeInputType(ApiEnum):
# Duplicated in reainternal.enums, both should be kept in sync
USER_INPUT = 'USER_INPUT'
WORKFLOW_VARIABLE = 'WORKFLOW_VARIABLE'
IGNORE = 'IGNORE'


class WorkflowNodeOutputType(ApiEnum):
2 changes: 2 additions & 0 deletions abacusai/api_class/model.py
@@ -681,6 +681,7 @@ class TimeseriesAnomalyTrainingConfig(TrainingConfig):
anomaly_type (TimeseriesAnomalyTypeOfAnomaly): select what kind of peaks to detect as anomalies
hyperparameter_calculation_with_heuristics (TimeseriesAnomalyUseHeuristic): Enable heuristic calculation to get hyperparameters for the model
threshold_score (float): Threshold score for anomaly detection
additional_anomaly_ids (List[str]): List of categorical columns that can act as multi-identifier
"""
type_of_split: enums.TimeseriesAnomalyDataSplitType = dataclasses.field(default=None)
test_start: str = dataclasses.field(default=None)
@@ -692,6 +693,7 @@ class TimeseriesAnomalyTrainingConfig(TrainingConfig):
anomaly_type: enums.TimeseriesAnomalyTypeOfAnomaly = dataclasses.field(default=None)
hyperparameter_calculation_with_heuristics: enums.TimeseriesAnomalyUseHeuristic = dataclasses.field(default=None)
threshold_score: float = dataclasses.field(default=None)
additional_anomaly_ids: List[str] = dataclasses.field(default=None)

def __post_init__(self):
self.problem_type = enums.ProblemType.TS_ANOMALY
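A hedged sketch of the new training option; the column names are illustrative.

from abacusai.api_class.model import TimeseriesAnomalyTrainingConfig

config = TimeseriesAnomalyTrainingConfig(
    threshold_score=0.9,
    additional_anomaly_ids=['store_id', 'sensor_id'],  # extra categorical identifier columns
)
print(config.problem_type)  # expected: ProblemType.TS_ANOMALY, set in __post_init__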
39 changes: 39 additions & 0 deletions abacusai/chatllm_computer.py
@@ -0,0 +1,39 @@
from .return_class import AbstractApiClass


class ChatllmComputer(AbstractApiClass):
"""
ChatLLMComputer
Args:
client (ApiClient): An authenticated API Client instance
computerId (int): The computer id.
token (str): The token.
vncEndpoint (str): The VNC endpoint.
"""

def __init__(self, client, computerId=None, token=None, vncEndpoint=None):
super().__init__(client, None)
self.computer_id = computerId
self.token = token
self.vnc_endpoint = vncEndpoint
self.deprecated_keys = {}

def __repr__(self):
repr_dict = {f'computer_id': repr(self.computer_id), f'token': repr(
self.token), f'vnc_endpoint': repr(self.vnc_endpoint)}
class_name = "ChatllmComputer"
repr_str = ',\n '.join([f'{key}={value}' for key, value in repr_dict.items(
) if getattr(self, key, None) is not None and key not in self.deprecated_keys])
return f"{class_name}({repr_str})"

def to_dict(self):
"""
Get a dict representation of the parameters in this class
Returns:
dict: The dict value representation of the class parameters
"""
resp = {'computer_id': self.computer_id,
'token': self.token, 'vnc_endpoint': self.vnc_endpoint}
return {key: value for key, value in resp.items() if value is not None and key not in self.deprecated_keys}
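
ChatllmComputer objects are normally constructed by the API client from response payloads; below is a hedged standalone sketch (None client, placeholder values) of what __repr__ and to_dict produce.

from abacusai.chatllm_computer import ChatllmComputer

computer = ChatllmComputer(client=None, computerId=42, token='abc123', vncEndpoint='wss://example.com/vnc')
print(computer)           # roughly: ChatllmComputer(computer_id=42, token='abc123', vnc_endpoint='wss://example.com/vnc')
print(computer.to_dict())
# {'computer_id': 42, 'token': 'abc123', 'vnc_endpoint': 'wss://example.com/vnc'}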