
Commit b0d009f

Apply automated fixes from Ruff linter (#308)
1 parent: eb92d49


46 files changed: +363 -351 lines

.pre-commit-config.yaml

Lines changed: 1 addition & 1 deletion
@@ -3,4 +3,4 @@ repos:
     rev: v0.12.3
     hooks:
       - id: ruff
-        args: ["--exit-zero"]
+        args: ["--exit-zero"]
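
The --exit-zero flag makes ruff check print its diagnostics but exit 0 regardless, so this hook surfaces lint findings without ever blocking a commit. A rough Python equivalent of what the hook runs, as a sketch (the real pre-commit entry may add flags such as --force-exclude):

import subprocess

# ruff reports any violations it finds, but --exit-zero forces a success
# exit code, so pre-commit treats the hook as passing either way.
result = subprocess.run(["ruff", "check", "--exit-zero", "."])
assert result.returncode == 0  # holds even when violations were printed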

setup.py

Lines changed: 2 additions & 2 deletions
@@ -1,9 +1,9 @@
 import setuptools
 import os
 
-version = os.environ['CONDUCTOR_PYTHON_VERSION']
+version = os.environ["CONDUCTOR_PYTHON_VERSION"]
 if version is None:
-    version = '0.0.0-SNAPSHOT'
+    version = "0.0.0-SNAPSHOT"
 
 setuptools.setup(
     version=version,
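
One observation about the guarded lookup above (not part of this commit): os.environ["CONDUCTOR_PYTHON_VERSION"] raises KeyError when the variable is unset, so the "if version is None" fallback is unreachable. The pattern the fallback seems to expect is a .get() lookup; a minimal sketch:

import os

# .get() returns None for a missing variable instead of raising KeyError,
# which makes the SNAPSHOT fallback reachable.
version = os.environ.get("CONDUCTOR_PYTHON_VERSION")
if version is None:
    version = "0.0.0-SNAPSHOT"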

src/conductor/client/ai/integrations.py

Lines changed: 14 additions & 14 deletions
@@ -19,21 +19,21 @@ def __init__(self, api_key: str, endpoint: str, classname: str) -> None:
 
     def to_dict(self) -> dict:
         return {
-            'api_key': self.api_key,
-            'endpoint': self.endpoint
+            "api_key": self.api_key,
+            "endpoint": self.endpoint
         }
 
 
 class OpenAIConfig(IntegrationConfig):
 
     def __init__(self, api_key: Optional[str] = None) -> None:
         if api_key is None:
-            api_key = os.getenv('OPENAI_API_KEY')
+            api_key = os.getenv("OPENAI_API_KEY")
         self.api_key = api_key
 
     def to_dict(self) -> dict:
         return {
-            'api_key': self.api_key
+            "api_key": self.api_key
         }
 
 
@@ -45,38 +45,38 @@ def __init__(self, api_key: str, endpoint: str) -> None:
 
     def to_dict(self) -> dict:
         return {
-            'api_key': self.api_key,
-            'endpoint': self.endpoint
+            "api_key": self.api_key,
+            "endpoint": self.endpoint
         }
 
 
 class PineconeConfig(IntegrationConfig):
 
     def __init__(self, api_key: Optional[str] = None, endpoint: Optional[str] = None, environment: Optional[str] = None, project_name: Optional[str] = None) -> None:
         if api_key is None:
-            self.api_key = os.getenv('PINECONE_API_KEY')
+            self.api_key = os.getenv("PINECONE_API_KEY")
         else:
             self.api_key = api_key
 
         if endpoint is None:
-            self.endpoint = os.getenv('PINECONE_ENDPOINT')
+            self.endpoint = os.getenv("PINECONE_ENDPOINT")
         else:
             self.endpoint = endpoint
 
         if environment is None:
-            self.environment = os.getenv('PINECONE_ENV')
+            self.environment = os.getenv("PINECONE_ENV")
         else:
             self.environment = environment
 
         if project_name is None:
-            self.project_name = os.getenv('PINECONE_PROJECT')
+            self.project_name = os.getenv("PINECONE_PROJECT")
         else:
             self.project_name = project_name
 
     def to_dict(self) -> dict:
         return {
-            'api_key': self.api_key,
-            'endpoint': self.endpoint,
-            'projectName': self.project_name,
-            'environment': self.environment
+            "api_key": self.api_key,
+            "endpoint": self.endpoint,
+            "projectName": self.project_name,
+            "environment": self.environment
         }
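
All of these configs follow one pattern: explicit constructor arguments win, environment variables fill in whatever was omitted, and to_dict() serializes the result for the integration API. A usage sketch under that reading; the key values and endpoint are placeholders, not from this diff:

import os

from conductor.client.ai.integrations import OpenAIConfig, PineconeConfig

os.environ["OPENAI_API_KEY"] = "sk-placeholder"  # stand-in value for this example

openai_config = OpenAIConfig()  # no argument, so it falls back to OPENAI_API_KEY
print(openai_config.to_dict())  # {'api_key': 'sk-placeholder'}

pinecone_config = PineconeConfig(
    api_key="pc-placeholder",
    endpoint="https://example.pinecone.io",  # hypothetical endpoint
    environment="us-east-1",
    project_name="demo",
)
print(pinecone_config.to_dict()["projectName"])  # demo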

src/conductor/client/ai/orchestrator.py

Lines changed: 5 additions & 5 deletions
@@ -19,7 +19,7 @@
 NOT_FOUND_STATUS = 404
 
 class AIOrchestrator:
-    def __init__(self, api_configuration: Configuration, prompt_test_workflow_name: str = '') -> Self:
+    def __init__(self, api_configuration: Configuration, prompt_test_workflow_name: str = "") -> Self:
         orkes_clients = OrkesClients(api_configuration)
 
         self.integration_client = orkes_clients.get_integration_client()
@@ -28,8 +28,8 @@ def __init__(self, api_configuration: Configuration, prompt_test_workflow_name:
         self.prompt_client = orkes_clients.get_prompt_client()
 
         self.prompt_test_workflow_name = prompt_test_workflow_name
-        if self.prompt_test_workflow_name == '':
-            self.prompt_test_workflow_name = 'prompt_test_' + str(uuid4())
+        if self.prompt_test_workflow_name == "":
+            self.prompt_test_workflow_name = "prompt_test_" + str(uuid4())
 
     def add_prompt_template(self, name: str, prompt_template: str, description: str):
         self.prompt_client.save_prompt(name, description, prompt_template)
@@ -62,7 +62,7 @@ def add_ai_integration(self, ai_integration_name: str, provider: LLMProvider, mo
         details = IntegrationUpdate()
         details.configuration = config.to_dict()
         details.type = provider.value
-        details.category = 'AI_MODEL'
+        details.category = "AI_MODEL"
         details.enabled = True
         details.description = description
         existing_integration = self.integration_client.get_integration(integration_name=ai_integration_name)
@@ -81,7 +81,7 @@ def add_vector_store(self, db_integration_name: str, provider: VectorDB, indices
         vector_db = IntegrationUpdate()
         vector_db.configuration = config.to_dict()
         vector_db.type = provider.value
-        vector_db.category = 'VECTOR_DB'
+        vector_db.category = "VECTOR_DB"
         vector_db.enabled = True
         if description is None:
             description = db_integration_name
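
The constructor change above also shows the default-name idiom: an empty-string default plus a uuid4() suffix gives each orchestrator a unique prompt-test workflow name unless the caller supplies one. That pattern in isolation, as a standalone sketch (resolve_workflow_name is a hypothetical helper, not the SDK API):

from uuid import uuid4

def resolve_workflow_name(name: str = "") -> str:
    # Mirrors AIOrchestrator.__init__: an explicit name wins; otherwise
    # generate a unique "prompt_test_<uuid>" name.
    if name == "":
        name = "prompt_test_" + str(uuid4())
    return name

print(resolve_workflow_name())           # e.g. prompt_test_8c1f... (random)
print(resolve_workflow_name("my_test"))  # my_test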

src/conductor/client/automator/task_handler.py

Lines changed: 28 additions & 28 deletions
@@ -23,23 +23,23 @@
 _mp_fork_set = False
 if not _mp_fork_set:
     try:
-        if platform == 'win32':
-            set_start_method('spawn')
+        if platform == "win32":
+            set_start_method("spawn")
         else:
-            set_start_method('fork')
+            set_start_method("fork")
         _mp_fork_set = True
     except Exception as e:
-        logger.info(f'error when setting multiprocessing.set_start_method - maybe the context is set {e.args}')
+        logger.info(f"error when setting multiprocessing.set_start_method - maybe the context is set {e.args}")
 if platform == "darwin":
-    os.environ['no_proxy'] = '*'
+    os.environ["no_proxy"] = "*"
 
 def register_decorated_fn(name: str, poll_interval: int, domain: str, worker_id: str, func):
-    logger.info(f'decorated {name}')
+    logger.info(f"decorated {name}")
     _decorated_functions[(name, domain)] = {
-        'func': func,
-        'poll_interval': poll_interval,
-        'domain': domain,
-        'worker_id': worker_id
+        "func": func,
+        "poll_interval": poll_interval,
+        "domain": domain,
+        "worker_id": worker_id
     }
 
 
@@ -56,11 +56,11 @@ def __init__(
         self.logger_process, self.queue = _setup_logging_queue(configuration)
 
         # imports
-        importlib.import_module('conductor.client.http.models.task')
-        importlib.import_module('conductor.client.worker.worker_task')
+        importlib.import_module("conductor.client.http.models.task")
+        importlib.import_module("conductor.client.worker.worker_task")
         if import_modules is not None:
             for module in import_modules:
-                logger.info(f'loading module {module}')
+                logger.info(f"loading module {module}")
                 importlib.import_module(module)
 
         elif not isinstance(workers, list):
@@ -77,12 +77,12 @@ def __init__(
                     worker_id=worker_id,
                     domain=domain,
                     poll_interval=poll_interval)
-                logger.info(f'created worker with name={task_def_name} and domain={domain}')
+                logger.info(f"created worker with name={task_def_name} and domain={domain}")
                 workers.append(worker)
 
         self.__create_task_runner_processes(workers, configuration, metrics_settings)
         self.__create_metrics_provider_process(metrics_settings)
-        logger.info('TaskHandler initialized')
+        logger.info("TaskHandler initialized")
 
     def __enter__(self):
         return self
@@ -93,24 +93,24 @@ def __exit__(self, exc_type, exc_value, traceback):
     def stop_processes(self) -> None:
         self.__stop_task_runner_processes()
         self.__stop_metrics_provider_process()
-        logger.info('Stopped worker processes...')
+        logger.info("Stopped worker processes...")
         self.queue.put(None)
         self.logger_process.terminate()
 
     def start_processes(self) -> None:
-        logger.info('Starting worker processes...')
+        logger.info("Starting worker processes...")
         freeze_support()
         self.__start_task_runner_processes()
         self.__start_metrics_provider_process()
-        logger.info('Started all processes')
+        logger.info("Started all processes")
 
     def join_processes(self) -> None:
         try:
             self.__join_task_runner_processes()
             self.__join_metrics_provider_process()
-            logger.info('Joined all processes')
+            logger.info("Joined all processes")
         except KeyboardInterrupt:
-            logger.info('KeyboardInterrupt: Stopping all processes')
+            logger.info("KeyboardInterrupt: Stopping all processes")
             self.stop_processes()
 
     def __create_metrics_provider_process(self, metrics_settings: MetricsSettings) -> None:
@@ -121,7 +121,7 @@ def __create_metrics_provider_process(self, metrics_settings: MetricsSettings) -
             target=MetricsCollector.provide_metrics,
             args=(metrics_settings,)
         )
-        logger.info('Created MetricsProvider process')
+        logger.info("Created MetricsProvider process")
 
     def __create_task_runner_processes(
             self,
@@ -149,25 +149,25 @@ def __start_metrics_provider_process(self):
         if self.metrics_provider_process is None:
             return
         self.metrics_provider_process.start()
-        logger.info('Started MetricsProvider process')
+        logger.info("Started MetricsProvider process")
 
     def __start_task_runner_processes(self):
         n = 0
         for task_runner_process in self.task_runner_processes:
             task_runner_process.start()
             n = n + 1
-        logger.info(f'Started {n} TaskRunner process')
+        logger.info(f"Started {n} TaskRunner process")
 
     def __join_metrics_provider_process(self):
         if self.metrics_provider_process is None:
             return
         self.metrics_provider_process.join()
-        logger.info('Joined MetricsProvider processes')
+        logger.info("Joined MetricsProvider processes")
 
     def __join_task_runner_processes(self):
         for task_runner_process in self.task_runner_processes:
             task_runner_process.join()
-        logger.info('Joined TaskRunner processes')
+        logger.info("Joined TaskRunner processes")
 
     def __stop_metrics_provider_process(self):
         self.__stop_process(self.metrics_provider_process)
@@ -180,12 +180,12 @@ def __stop_process(self, process: Process):
         if process is None:
             return
         try:
-            logger.debug(f'Terminating process: {process.pid}')
+            logger.debug(f"Terminating process: {process.pid}")
             process.terminate()
         except Exception as e:
-            logger.debug(f'Failed to terminate process: {process.pid}, reason: {e}')
+            logger.debug(f"Failed to terminate process: {process.pid}, reason: {e}")
             process.kill()
-            logger.debug(f'Killed process: {process.pid}')
+            logger.debug(f"Killed process: {process.pid}")
 
 
 # Setup centralized logging queue
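
Taken together, these methods trace TaskHandler's lifecycle: it is a context manager that fans workers out across processes, start_processes() launches the TaskRunner and MetricsProvider children, join_processes() blocks until they exit, and a KeyboardInterrupt falls through to stop_processes(). A hedged usage sketch; the Configuration import path and its server_api_url argument are assumptions, not shown in this diff:

from conductor.client.automator.task_handler import TaskHandler
from conductor.client.configuration.configuration import Configuration  # assumed path

# Hypothetical server URL; point this at a real Conductor instance.
config = Configuration(server_api_url="http://localhost:8080/api")

with TaskHandler(workers=[], configuration=config) as handler:
    handler.start_processes()  # forks/spawns TaskRunner and MetricsProvider processes
    handler.join_processes()   # blocks; Ctrl-C falls through to stop_processes()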
