ai2_kit.core.executor module#

class ai2_kit.core.executor.BaseExecutorConfig(*, ssh: Optional[SshConfig] = None, queue_system: QueueSystemConfig, work_dir: str, python_cmd: str = 'python')[source]#

Bases: BaseModel

model_computed_fields: ClassVar[dict[str, ComputedFieldInfo]] = {}#

A dictionary of computed field names and their corresponding ComputedFieldInfo objects.

model_config: ClassVar[ConfigDict] = {'extra': 'forbid'}#

Configuration for the model; it should be a dictionary conforming to pydantic's ConfigDict.

model_fields: ClassVar[dict[str, FieldInfo]] = {'python_cmd': FieldInfo(annotation=str, required=False, default='python'), 'queue_system': FieldInfo(annotation=QueueSystemConfig, required=True), 'ssh': FieldInfo(annotation=Union[SshConfig, NoneType], required=False, default=None), 'work_dir': FieldInfo(annotation=str, required=True)}#

Metadata about the fields defined on the model: a mapping of field names to FieldInfo objects.

This replaces Model.__fields__ from Pydantic V1.

python_cmd: str#
queue_system: QueueSystemConfig#
ssh: Optional[SshConfig]#
work_dir: str#
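
All fields except ssh and python_cmd are required, and unknown keys are rejected because extra is set to 'forbid'. A minimal construction sketch; the contents of the ssh and queue_system sub-dictionaries below are assumptions, see SshConfig and QueueSystemConfig for the actual schema:

    from ai2_kit.core.executor import BaseExecutorConfig

    config = BaseExecutorConfig(
        ssh={"host": "login.example.com", "user": "alice"},  # hypothetical SshConfig keys
        queue_system={"slurm": {}},                          # hypothetical QueueSystemConfig keys
        work_dir="/scratch/alice/ai2-kit",
        python_cmd="python3",
    )
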
class ai2_kit.core.executor.Executor[source]#

Bases: ABC

abstract download(from_path: str, to_dir: str) → str[source]#
abstract dump_text(text: str, path: str)[source]#
abstract glob(pattern: str) → List[str][source]#
init()[source]#
abstract load_text(path: str) → str[source]#
abstract mkdir(path: str)[source]#
name: str#
python_cmd: str#
abstract resolve_artifact(artifact: Artifact) → List[str][source]#
abstract run(script: str, **kwargs) → Result[source]#
abstract run_python_fn(fn: FnType, python_cmd=None, cwd=None) → FnType[source]#
abstract run_python_script(script: str, python_cmd=None)[source]#
setup_workspace(workspace_dir: str, dirs: List[str])[source]#
abstract submit(script: str, **kwargs) → JobFuture[source]#
tmp_dir: str#
abstract upload(from_path: str, to_dir: str) → str[source]#
work_dir: str#
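
Executor defines the abstract file-staging and job-execution interface that concrete executors implement. A usage sketch against this interface; the paths, the script body, and the keyword arguments passed to submit are placeholders rather than documented behaviour:

    from ai2_kit.core.executor import Executor

    def stage_and_submit(executor: Executor, local_input: str):
        # Create a working directory on the executor side and stage the input file.
        run_dir = executor.work_dir + "/demo-run"
        executor.mkdir(run_dir)
        remote_input = executor.upload(local_input, run_dir)

        # Write a small shell script and submit it; submit() returns a JobFuture
        # (see the signature above) that can be used to track the job.
        executor.dump_text("#!/bin/bash\nwc -l " + remote_input + "\n", run_dir + "/job.sh")
        return executor.submit("bash " + run_dir + "/job.sh", cwd=run_dir)
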
class ai2_kit.core.executor.ExecutorManager(executor_configs: Mapping[str, BaseExecutorConfig])[source]#

Bases: object

get_executor(name: str)[source]#
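
ExecutorManager keeps a mapping from executor names to their configs and hands out executors by name. A sketch, reusing the same hypothetical configuration shape as in the BaseExecutorConfig example above (the nested queue_system keys are assumptions):

    from ai2_kit.core.executor import BaseExecutorConfig, ExecutorManager

    configs = {
        "hpc-cluster-01": BaseExecutorConfig(
            queue_system={"slurm": {}},          # hypothetical QueueSystemConfig keys
            work_dir="/scratch/alice/ai2-kit",
        ),
    }
    manager = ExecutorManager(configs)
    executor = manager.get_executor("hpc-cluster-01")
    executor.init()  # prepare the executor before use; exact effect is an assumption
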
class ai2_kit.core.executor.HpcExecutor(connector: BaseConnector, queue_system: BaseQueueSystem, work_dir: str, python_cmd: str, name: str)[source]#

Bases: Executor

download(from_path: str, to_dir: str)[source]#
dump_text(text: str, path: str)[source]#
classmethod from_config(config: Union[dict, BaseExecutorConfig], name: str = '')[source]#
glob(pattern: str)[source]#
init()[source]#
property is_local#
load_text(path: str) → str[source]#
mkdir(path: str)[source]#
name: str#
python_cmd: str#
resolve_artifact(artifact: Artifact) → List[str][source]#
run(script: str, **kwargs)[source]#
run_python_fn(fn: FnType, python_cmd=None, cwd=None) → FnType[source]#
run_python_script(script: str, python_cmd=None, cwd=None)[source]#
submit(script: str, cwd: str, **kwargs)[source]#
tmp_dir: str#
upload(from_path: str, to_dir: str)[source]#
upload_python_pkg(pkg: str)[source]#

Upload a Python package to the remote server.

work_dir: str#
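
HpcExecutor is the concrete Executor backed by a BaseConnector and a BaseQueueSystem; the is_local property suggests it can also operate without SSH. A sketch of building one from a plain config dict and shipping a local function through run_python_fn; that the returned callable runs count_lines with the executor's python_cmd is an inference from the FnType → FnType signature, not documented here:

    from ai2_kit.core.executor import HpcExecutor

    # Hypothetical config dict; nested keys are assumptions (see BaseExecutorConfig above).
    config = {
        "ssh": {"host": "login.example.com", "user": "alice"},
        "queue_system": {"slurm": {}},
        "work_dir": "/scratch/alice/ai2-kit",
        "python_cmd": "python3",
    }
    executor = HpcExecutor.from_config(config, name="hpc-cluster-01")
    executor.init()

    def count_lines(path: str) -> int:
        with open(path) as f:
            return sum(1 for _ in f)

    # run_python_fn wraps count_lines; calling the wrapper is expected to execute
    # the function on the executor's side (an assumption based on the signature).
    remote_count_lines = executor.run_python_fn(count_lines, cwd=executor.work_dir)
    n = remote_count_lines("input.txt")
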
ai2_kit.core.executor.create_executor(config: BaseExecutorConfig, name: str) → Executor[source]#
ai2_kit.core.executor.fn_to_script(fn: Callable, args, kwargs, delimiter='@')[source]#
ai2_kit.core.executor.pickle_converts(obj, pickle_module='cp', bz2_module='bz2', base64_module='base64')[source]#

Convert an object to its pickled string form.
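
These two module-level helpers appear to be plumbing for shipping Python callables as scripts: pickle_converts serialises an object into a string that can be embedded in generated code, and fn_to_script builds a script around a callable and its call arguments. A sketch of calling them directly; the exact content of the returned strings is not documented in this section, and the meaning of the module-name parameters is inferred from their defaults:

    from ai2_kit.core.executor import fn_to_script, pickle_converts

    # pickle_converts returns a string form of the pickled object; the pickle_module,
    # bz2_module and base64_module arguments name the module aliases used in that
    # string (an inference from the defaults 'cp', 'bz2', 'base64').
    payload = pickle_converts({"alpha": 1, "beta": [2, 3]})
    print(payload)

    def add(a, b):
        return a + b

    # Embed add(1, 2) into a standalone script, using '@' as the section delimiter.
    script = fn_to_script(add, args=(1, 2), kwargs={}, delimiter="@")
    print(script)
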