Version 1.4.22 · mmrech/api_python@39bd61e · GitHub
[go: up one dir, main page]

Skip to content

Commit 39bd61e

Browse files
committed
Version 1.4.22
1 parent 88a57fd commit 39bd61e

File tree

498 files changed

+15668
-4983
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

498 files changed

+15668
-4983
lines changed

abacusai/__init__.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@
2626
from .categorical_range_violation import CategoricalRangeViolation
2727
from .chat_message import ChatMessage
2828
from .chat_session import ChatSession
29+
from .chatllm_computer import ChatllmComputer
2930
from .chatllm_referral_invite import ChatllmReferralInvite
3031
from .client import AgentResponse, ApiClient, ApiException, ClientOptions, ReadOnlyClient, _request_context
3132
from .code_autocomplete_response import CodeAutocompleteResponse
@@ -225,4 +226,4 @@
225226
from .workflow_node_template import WorkflowNodeTemplate
226227

227228

228-
__version__ = "1.4.21"
229+
__version__ = "1.4.22"

abacusai/api_class/ai_agents.py

Lines changed: 49 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import ast
22
import dataclasses
3-
from typing import Any, Dict, List, Union
3+
from typing import Dict, List, Union
44

55
from . import enums
66
from .abstract import ApiClass, get_clean_function_source_code_for_agent, validate_constructor_arg_types
@@ -55,7 +55,7 @@ class WorkflowNodeInputMapping(ApiClass):
5555
5656
Args:
5757
name (str): The name of the input variable of the node function.
58-
variable_type (WorkflowNodeInputType): The type of the input.
58+
variable_type (Union[WorkflowNodeInputType, str]): The type of the input. If the type is `IGNORE`, the input will be ignored.
5959
variable_source (str): The name of the node this variable is sourced from.
6060
If the type is `WORKFLOW_VARIABLE`, the value given by the source node will be directly used.
6161
If the type is `USER_INPUT`, the value given by the source node will be used as the default initial value before the user edits it.
@@ -67,7 +67,12 @@ class WorkflowNodeInputMapping(ApiClass):
6767
variable_source: str = dataclasses.field(default=None)
6868
source_prop: str = dataclasses.field(default=None)
6969
is_required: bool = dataclasses.field(default=True)
70-
default_value: Any = dataclasses.field(default=None)
70+
71+
def __post_init__(self):
72+
if self.variable_type == enums.WorkflowNodeInputType.IGNORE and self.is_required:
73+
raise ValueError('input_mapping', 'Invalid input mapping. The variable type cannot be IGNORE if is_required is True.')
74+
if isinstance(self.variable_type, str):
75+
self.variable_type = enums.WorkflowNodeInputType(self.variable_type)
7176

7277
def to_dict(self):
7378
return {
@@ -76,7 +81,6 @@ def to_dict(self):
7681
'variable_source': self.variable_source,
7782
'source_prop': self.source_prop or self.name,
7883
'is_required': self.is_required,
79-
'default_value': self.default_value
8084
}
8185

8286
@classmethod
@@ -90,7 +94,6 @@ def from_dict(cls, mapping: dict):
9094
variable_source=mapping.get('variable_source'),
9195
source_prop=mapping.get('source_prop') or mapping['name'] if mapping.get('variable_source') else None,
9296
is_required=mapping.get('is_required', True),
93-
default_value=mapping.get('default_value')
9497
)
9598

9699

@@ -219,6 +222,30 @@ def from_dict(cls, schema: dict):
219222
)
220223

221224

225+
@validate_constructor_arg_types('trigger_config')
226+
@dataclasses.dataclass
227+
class TriggerConfig(ApiClass):
228+
"""
229+
Represents the configuration for a trigger workflow node.
230+
231+
Args:
232+
sleep_time (int): The time in seconds to wait before the node gets executed again.
233+
"""
234+
sleep_time: int = dataclasses.field(default=None)
235+
236+
def to_dict(self):
237+
return {
238+
'sleep_time': self.sleep_time
239+
}
240+
241+
@classmethod
242+
def from_dict(cls, configs: dict):
243+
validate_input_dict_param(configs, friendly_class_name='trigger_config')
244+
return cls(
245+
sleep_time=configs.get('sleep_time', None)
246+
)
247+
248+
222249
@validate_constructor_arg_types('workflow_graph_node')
223250
@dataclasses.dataclass
224251
class WorkflowGraphNode(ApiClass):
@@ -236,10 +263,12 @@ class WorkflowGraphNode(ApiClass):
236263
Additional Attributes:
237264
function_name (str): The name of the function.
238265
source_code (str): The source code of the function.
266+
trigger_config (TriggerConfig): The configuration for a trigger workflow node.
239267
"""
240268

241-
def __init__(self, name: str, input_mappings: Union[Dict[str, WorkflowNodeInputMapping], List[WorkflowNodeInputMapping]] = None, output_mappings: Union[List[str], Dict[str, str], List[WorkflowNodeOutputMapping]] = None, function: callable = None, function_name: str = None, source_code: str = None, input_schema: Union[List[str], WorkflowNodeInputSchema] = None, output_schema: Union[List[str], WorkflowNodeOutputSchema] = None, template_metadata: dict = None):
269+
def __init__(self, name: str, input_mappings: Union[Dict[str, WorkflowNodeInputMapping], List[WorkflowNodeInputMapping]] = None, output_mappings: Union[List[str], Dict[str, str], List[WorkflowNodeOutputMapping]] = None, function: callable = None, function_name: str = None, source_code: str = None, input_schema: Union[List[str], WorkflowNodeInputSchema] = None, output_schema: Union[List[str], WorkflowNodeOutputSchema] = None, template_metadata: dict = None, trigger_config: TriggerConfig = None):
242270
self.template_metadata = template_metadata
271+
self.trigger_config = trigger_config
243272
if self.template_metadata and not self.template_metadata.get('initialized'):
244273
self.name = name
245274
self.function_name = None
@@ -286,14 +315,14 @@ def __init__(self, name: str, input_mappings: Union[Dict[str, WorkflowNodeInputM
286315
raise ValueError('workflow_graph_node', f'Invalid input mapping. Argument "{input_name}" not found in function "{self.function_name}".')
287316
for arg, default in arg_defaults.items():
288317
if arg not in input_mapping_args:
289-
self.input_mappings.append(WorkflowNodeInputMapping(name=arg, variable_type=enums.WorkflowNodeInputType.USER_INPUT, is_required=default is None, default_value=default.value if default else None))
318+
self.input_mappings.append(WorkflowNodeInputMapping(name=arg, variable_type=enums.WorkflowNodeInputType.USER_INPUT, is_required=default is None))
290319
elif isinstance(input_mappings, Dict) and all(isinstance(key, str) and isinstance(value, WorkflowNodeInputMapping) for key, value in input_mappings.items()):
291320
is_shortform_input_mappings = True
292-
self.input_mappings = [WorkflowNodeInputMapping(name=arg, variable_type=enums.WorkflowNodeInputType.USER_INPUT, is_required=default is None, default_value=default.value if default else None) for arg, default in arg_defaults.items() if arg not in input_mappings]
321+
self.input_mappings = [WorkflowNodeInputMapping(name=arg, variable_type=enums.WorkflowNodeInputType.USER_INPUT, is_required=default is None) for arg, default in arg_defaults.items() if arg not in input_mappings]
293322
for key, value in input_mappings.items():
294323
if key not in arg_defaults:
295324
raise ValueError('workflow_graph_node', f'Invalid input mapping. Argument "{key}" not found in function "{self.function_name}".')
296-
self.input_mappings.append(WorkflowNodeInputMapping(name=key, variable_type=value.variable_type, variable_source=value.variable_source, source_prop=value.source_prop, is_required=arg_defaults.get(key) is None, default_value=value.default_value))
325+
self.input_mappings.append(WorkflowNodeInputMapping(name=key, variable_type=value.variable_type, variable_source=value.variable_source, source_prop=value.source_prop, is_required=arg_defaults.get(key) is None))
297326
else:
298327
raise ValueError('workflow_graph_node', 'Invalid input mappings. Must be a list of WorkflowNodeInputMapping or a dictionary of input mappings in the form {arg_name: node_name.outputs.prop_name}.')
299328

@@ -336,8 +365,8 @@ def __init__(self, name: str, input_mappings: Union[Dict[str, WorkflowNodeInputM
336365
raise ValueError('workflow_graph_node', 'Invalid output schema. Must be a WorkflowNodeOutputSchema or a list of output section names.')
337366

338367
@classmethod
339-
def _raw_init(cls, name: str, input_mappings: List[WorkflowNodeInputMapping] = None, output_mappings: List[WorkflowNodeOutputMapping] = None, function: callable = None, function_name: str = None, source_code: str = None, input_schema: WorkflowNodeInputSchema = None, output_schema: WorkflowNodeOutputSchema = None, template_metadata: dict = None):
340-
workflow_node = cls.__new__(cls, name, input_mappings, output_mappings, input_schema, output_schema, template_metadata)
368+
def _raw_init(cls, name: str, input_mappings: List[WorkflowNodeInputMapping] = None, output_mappings: List[WorkflowNodeOutputMapping] = None, function: callable = None, function_name: str = None, source_code: str = None, input_schema: WorkflowNodeInputSchema = None, output_schema: WorkflowNodeOutputSchema = None, template_metadata: dict = None, trigger_config: TriggerConfig = None):
369+
workflow_node = cls.__new__(cls, name, input_mappings, output_mappings, input_schema, output_schema, template_metadata, trigger_config)
341370
workflow_node.name = name
342371
if function:
343372
workflow_node.function = function
@@ -353,6 +382,7 @@ def _raw_init(cls, name: str, input_mappings: List[WorkflowNodeInputMapping] = N
353382
workflow_node.input_schema = input_schema
354383
workflow_node.output_schema = output_schema
355384
workflow_node.template_metadata = template_metadata
385+
workflow_node.trigger_config = trigger_config
356386
return workflow_node
357387

358388
@classmethod
@@ -362,7 +392,7 @@ def from_template(cls, template_name: str, name: str, configs: dict = None, inpu
362392
if isinstance(input_mappings, List) and all(isinstance(input, WorkflowNodeInputMapping) for input in input_mappings):
363393
instance_input_mappings = input_mappings
364394
elif isinstance(input_mappings, Dict) and all(isinstance(key, str) and isinstance(value, WorkflowNodeInputMapping) for key, value in input_mappings.items()):
365-
instance_input_mappings = [WorkflowNodeInputMapping(name=arg, variable_type=mapping.variable_type, variable_source=mapping.variable_source, source_prop=mapping.source_prop, is_required=mapping.is_required, default_value=mapping.default_value) for arg, mapping in input_mappings]
395+
instance_input_mappings = [WorkflowNodeInputMapping(name=arg, variable_type=mapping.variable_type, variable_source=mapping.variable_source, source_prop=mapping.source_prop, is_required=mapping.is_required) for arg, mapping in input_mappings]
366396
elif input_mappings is None:
367397
instance_input_mappings = []
368398
else:
@@ -410,13 +440,17 @@ def to_dict(self):
410440
'output_mappings': [mapping.to_dict() for mapping in self.output_mappings],
411441
'input_schema': self.input_schema.to_dict(),
412442
'output_schema': self.output_schema.to_dict(),
413-
'template_metadata': self.template_metadata
443+
'template_metadata': self.template_metadata,
444+
'trigger_config': self.trigger_config.to_dict() if self.trigger_config else None
414445
}
415446

416447
@classmethod
417448
def from_dict(cls, node: dict):
418449
validate_input_dict_param(node, friendly_class_name='workflow_graph_node', must_contain=['name', 'function_name', 'source_code'])
419450
_cls = cls._raw_init if node.get('__return_filter') else cls
451+
if node.get('template_metadata') and node.get('template_metadata').get('template_type') == 'trigger':
452+
if not node.get('trigger_config'):
453+
node['trigger_config'] = {'sleep_time': node.get('template_metadata').get('sleep_time')}
420454
instance = _cls(
421455
name=node['name'],
422456
function_name=node['function_name'],
@@ -425,7 +459,8 @@ def from_dict(cls, node: dict):
425459
output_mappings=[WorkflowNodeOutputMapping.from_dict(mapping) for mapping in node.get('output_mappings', [])],
426460
input_schema=WorkflowNodeInputSchema.from_dict(node.get('input_schema', {})),
427461
output_schema=WorkflowNodeOutputSchema.from_dict(node.get('output_schema', {})),
428-
template_metadata=node.get('template_metadata')
462+
template_metadata=node.get('template_metadata'),
463+
trigger_config=TriggerConfig.from_dict(node.get('trigger_config')) if node.get('trigger_config') else None
429464
)
430465
return instance
431466

abacusai/api_class/dataset.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -59,8 +59,11 @@ class DocumentProcessingConfig(ApiClass):
5959

6060
def __post_init__(self):
6161
self.ocr_mode = self._detect_ocr_mode()
62-
if self.document_type is not None and DocumentType.is_ocr_forced(self.document_type):
63-
self.highlight_relevant_text = True
62+
if self.document_type is not None:
63+
if DocumentType.is_ocr_forced(self.document_type):
64+
self.highlight_relevant_text = True
65+
else:
66+
self.highlight_relevant_text = False
6467
if self.highlight_relevant_text is not None:
6568
self.extract_bounding_boxes = self.highlight_relevant_text # Highlight_relevant text acts as a wrapper over extract_bounding_boxes
6669

abacusai/api_class/dataset_application_connector.py

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,19 @@ def __post_init__(self):
4444
self.application_connector_type = enums.ApplicationConnectorType.CONFLUENCE
4545

4646

47+
@dataclasses.dataclass
48+
class BoxDatasetConfig(ApplicationConnectorDatasetConfig):
49+
"""
50+
Dataset config for Box Application Connector
51+
Args:
52+
location (str): The regex location of the files to fetch
53+
"""
54+
location: str = dataclasses.field(default=None)
55+
56+
def __post_init__(self):
57+
self.application_connector_type = enums.ApplicationConnectorType.BOX
58+
59+
4760
@dataclasses.dataclass
4861
class GoogleAnalyticsDatasetConfig(ApplicationConnectorDatasetConfig):
4962
"""
@@ -217,4 +230,5 @@ class _ApplicationConnectorDatasetConfigFactory(_ApiClassFactory):
217230
enums.ApplicationConnectorType.ABACUSUSAGEMETRICS: AbacusUsageMetricsDatasetConfig,
218231
enums.ApplicationConnectorType.FRESHSERVICE: FreshserviceDatasetConfig,
219232
enums.ApplicationConnectorType.TEAMSSCRAPER: TeamsScraperDatasetConfig,
233+
enums.ApplicationConnectorType.BOX: BoxDatasetConfig,
220234
}

abacusai/api_class/enums.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -411,6 +411,7 @@ class ApplicationConnectorType(ApiEnum):
411411
TEAMSSCRAPER = 'TEAMSSCRAPER'
412412
GITHUBUSER = 'GITHUBUSER'
413413
OKTASAML = 'OKTASAML'
414+
BOX = 'BOX'
414415

415416

416417
class StreamingConnectorType(ApiEnum):
@@ -482,6 +483,7 @@ class LLMName(ApiEnum):
482483
ABACUS_SMAUG3 = 'ABACUS_SMAUG3'
483484
ABACUS_DRACARYS = 'ABACUS_DRACARYS'
484485
QWEN_2_5_32B = 'QWEN_2_5_32B'
486+
QWQ_32B = 'QWQ_32B'
485487
GEMINI_1_5_FLASH = 'GEMINI_1_5_FLASH'
486488
XAI_GROK = 'XAI_GROK'
487489

@@ -549,6 +551,7 @@ class WorkflowNodeInputType(ApiEnum):
549551
# Duplicated in reainternal.enums, both should be kept in sync
550552
USER_INPUT = 'USER_INPUT'
551553
WORKFLOW_VARIABLE = 'WORKFLOW_VARIABLE'
554+
IGNORE = 'IGNORE'
552555

553556

554557
class WorkflowNodeOutputType(ApiEnum):

abacusai/api_class/model.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -681,6 +681,7 @@ class TimeseriesAnomalyTrainingConfig(TrainingConfig):
681681
anomaly_type (TimeseriesAnomalyTypeOfAnomaly): select what kind of peaks to detect as anomalies
682682
hyperparameter_calculation_with_heuristics (TimeseriesAnomalyUseHeuristic): Enable heuristic calculation to get hyperparameters for the model
683683
threshold_score (float): Threshold score for anomaly detection
684+
additional_anomaly_ids (List[str]): List of categorical columns that can act as multi-identifier
684685
"""
685686
type_of_split: enums.TimeseriesAnomalyDataSplitType = dataclasses.field(default=None)
686687
test_start: str = dataclasses.field(default=None)
@@ -692,6 +693,7 @@ class TimeseriesAnomalyTrainingConfig(TrainingConfig):
692693
anomaly_type: enums.TimeseriesAnomalyTypeOfAnomaly = dataclasses.field(default=None)
693694
hyperparameter_calculation_with_heuristics: enums.TimeseriesAnomalyUseHeuristic = dataclasses.field(default=None)
694695
threshold_score: float = dataclasses.field(default=None)
696+
additional_anomaly_ids: List[str] = dataclasses.field(default=None)
695697

696698
def __post_init__(self):
697699
self.problem_type = enums.ProblemType.TS_ANOMALY

abacusai/chatllm_computer.py

Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
from .return_class import AbstractApiClass
2+
3+
4+
class ChatllmComputer(AbstractApiClass):
5+
"""
6+
ChatLLMComputer
7+
8+
Args:
9+
client (ApiClient): An authenticated API Client instance
10+
computerId (int): The computer id.
11+
token (str): The token.
12+
vncEndpoint (str): The VNC endpoint.
13+
"""
14+
15+
def __init__(self, client, computerId=None, token=None, vncEndpoint=None):
16+
super().__init__(client, None)
17+
self.computer_id = computerId
18+
self.token = token
19+
self.vnc_endpoint = vncEndpoint
20+
self.deprecated_keys = {}
21+
22+
def __repr__(self):
23+
repr_dict = {f'computer_id': repr(self.computer_id), f'token': repr(
24+
self.token), f'vnc_endpoint': repr(self.vnc_endpoint)}
25+
class_name = "ChatllmComputer"
26+
repr_str = ',\n '.join([f'{key}={value}' for key, value in repr_dict.items(
27+
) if getattr(self, key, None) is not None and key not in self.deprecated_keys])
28+
return f"{class_name}({repr_str})"
29+
30+
def to_dict(self):
31+
"""
32+
Get a dict representation of the parameters in this class
33+
34+
Returns:
35+
dict: The dict value representation of the class parameters
36+
"""
37+
resp = {'computer_id': self.computer_id,
38+
'token': self.token, 'vnc_endpoint': self.vnc_endpoint}
39+
return {key: value for key, value in resp.items() if value is not None and key not in self.deprecated_keys}

0 commit comments

Comments (0)