ai2_kit.domain.deepmd module#

class ai2_kit.domain.deepmd.CllDeepmdContext(path_prefix: str, resource_manager: ai2_kit.core.resource_manager.ResourceManager, config: ai2_kit.domain.deepmd.CllDeepmdContextConfig)[source]#

Bases: BaseCllContext

config: CllDeepmdContextConfig#
class ai2_kit.domain.deepmd.CllDeepmdContextConfig(*, script_template: BashTemplate, dp_cmd: str = 'dp', concurrency: int = 5, multi_gpus_per_job: bool = False)[source]#

Bases: BaseModel

concurrency: int#
dp_cmd: str#
model_computed_fields: ClassVar[dict[str, ComputedFieldInfo]] = {}#

A dictionary of computed field names and their corresponding ComputedFieldInfo objects.

model_config: ClassVar[ConfigDict] = {'extra': 'forbid'}#

Configuration for the model; it should be a dictionary conforming to [ConfigDict][pydantic.config.ConfigDict].

model_fields: ClassVar[dict[str, FieldInfo]] = {'concurrency': FieldInfo(annotation=int, required=False, default=5), 'dp_cmd': FieldInfo(annotation=str, required=False, default='dp'), 'multi_gpus_per_job': FieldInfo(annotation=bool, required=False, default=False), 'script_template': FieldInfo(annotation=BashTemplate, required=True)}#

Metadata about the fields defined on the model, mapping of field names to [FieldInfo][pydantic.fields.FieldInfo].

This replaces Model.__fields__ from Pydantic V1.

multi_gpus_per_job: bool#
script_template: BashTemplate#
class ai2_kit.domain.deepmd.CllDeepmdInput(config: ai2_kit.domain.deepmd.CllDeepmdInputConfig, mode: Literal['default', 'dpff', 'fep-redox', 'fep-pka'], type_map: List[str], sel_type: Optional[List[int]], old_dataset: List[ai2_kit.core.artifact.Artifact], new_dataset: List[ai2_kit.core.artifact.Artifact], previous: List[ai2_kit.core.artifact.Artifact])[source]#

Bases: object

config: CllDeepmdInputConfig#
mode: Literal['default', 'dpff', 'fep-redox', 'fep-pka']#
new_dataset: List[Artifact]#
old_dataset: List[Artifact]#
previous: List[Artifact]#
sel_type: Optional[List[int]]#
type_map: List[str]#
class ai2_kit.domain.deepmd.CllDeepmdInputConfig(*, train_dw: Optional[DwTraningConfig] = None, model_num: int = 4, init_dataset: List[str] = [], input_template: dict = {}, compress_model: bool = False, pretrained_model: Optional[str] = None, isolate_outliers: bool = False, outlier_f_cutoff: float = 10.0, outlier_weight: float = 0.003, fixture_models: List[str] = [], group_by_formula: bool = False, init_from_previous: bool = False, input_modifier_fn: Optional[str] = None, ignore_error: bool = False, dp_train_opts: str = '')[source]#

Bases: BaseModel

class DwTraningConfig(*, input_template: dict)[source]#

Bases: BaseModel

Options for Deep Wannier (DW) model training

input_template: dict#

DeePMD-kit input template for Deep Wannier model training. Ref: https://docs.deepmodeling.com/projects/deepmd/en/master/model/dplr.html
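As a rough sketch only (the authoritative layout is in the linked DPLR documentation; every value below is an illustrative assumption, not a default of this package), a Deep Wannier template trains a dipole fitting net on atomic Wannier centroids:

train_dw_input_template = {
    "model": {
        "descriptor": {"type": "se_e2_a", "rcut": 6.0, "sel": "auto"},
        # Dipole fitting net for Wannier centroids; the selected atom
        # types are supplied by the workflow (see CllDeepmdInput.sel_type).
        "fitting_net": {"type": "dipole", "neuron": [100, 100, 100]},
    },
    "learning_rate": {"type": "exp", "start_lr": 1e-3},
    # Tensor loss with atomic contributions only (illustrative settings).
    "loss": {"type": "tensor", "pref": 0.0, "pref_atomic": 1.0},
    "training": {"numb_steps": 400000},
}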

model_computed_fields: ClassVar[dict[str, ComputedFieldInfo]] = {}#

A dictionary of computed field names and their corresponding ComputedFieldInfo objects.

model_config: ClassVar[ConfigDict] = {'extra': 'forbid'}#

Configuration for the model; it should be a dictionary conforming to [ConfigDict][pydantic.config.ConfigDict].

model_fields: ClassVar[dict[str, FieldInfo]] = {'input_template': FieldInfo(annotation=dict, required=True)}#

Metadata about the fields defined on the model, mapping of field names to [FieldInfo][pydantic.fields.FieldInfo].

This replaces Model.__fields__ from Pydantic V1.

compress_model: bool#

Whether to compress the model after training.

dp_train_opts: str#

Extra options passed to the dp train command.

fixture_models: List[str]#

Fixture models used to initialize training; glob patterns are supported. If this list is not empty, the whole training process will be skipped, which is useful for debugging or for exploring more structures without training. The models should be located on the remote executor. The name 'fixture' is borrowed from the concept of a fixture in pytest.

group_by_formula: bool#

Group the dataset by formula. If this is enabled, the dataset will be grouped by formula; otherwise, it will be grouped by ancestor.

Set this to True when you have multiple structures sharing the same ancestor.

ignore_error: bool#

Ignore non-critical errors.

init_dataset: List[str]#

Dataset used to initialize training.

init_from_previous: bool#

Use the previous models to initialize the current training, which can speed up the training process.

input_modifier_fn: Optional[str]#

A Python function to modify the input data. The function should take an input dict and return a new dict, for example:

def input_modifier_fn(input: dict) -> dict:
    ...
    return new_input
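As an illustration (the field names follow the DeePMD-kit input layout and the values are hypothetical, not defaults of this package), such a function could override the number of training steps before the template is rendered:

def input_modifier_fn(input: dict) -> dict:
    # Work on a copy so the original template dict is left untouched.
    new_input = dict(input)
    # 'training' / 'numb_steps' follow the DeePMD-kit input layout;
    # the concrete value here is purely illustrative.
    training = dict(new_input.get('training', {}))
    training['numb_steps'] = 400000
    new_input['training'] = training
    return new_input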

input_template: dict#

DeePMD-kit input template.
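For orientation only, a minimal template usually follows the DeePMD-kit input.json layout; the values below are illustrative placeholders, not defaults of this package. The type_map and the system paths are injected by the workflow (see make_deepmd_input below), so they normally do not need to be set here.

input_template = {
    "model": {
        "descriptor": {"type": "se_e2_a", "rcut": 6.0, "sel": "auto"},
        "fitting_net": {"neuron": [240, 240, 240]},
    },
    "learning_rate": {"type": "exp", "start_lr": 1e-3},
    "loss": {"type": "ener"},
    "training": {"numb_steps": 400000},
}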

isolate_outliers: bool#

If enabled, outlier data (as determined by outlier_f_cutoff) will be separated from the training data and weighted by outlier_weight.

model_computed_fields: ClassVar[dict[str, ComputedFieldInfo]] = {}#

A dictionary of computed field names and their corresponding ComputedFieldInfo objects.

model_config: ClassVar[ConfigDict] = {'extra': 'forbid'}#

Configuration for the model; it should be a dictionary conforming to [ConfigDict][pydantic.config.ConfigDict].

model_fields: ClassVar[dict[str, FieldInfo]] = {'compress_model': FieldInfo(annotation=bool, required=False, default=False), 'dp_train_opts': FieldInfo(annotation=str, required=False, default=''), 'fixture_models': FieldInfo(annotation=List[str], required=False, default=[]), 'group_by_formula': FieldInfo(annotation=bool, required=False, default=False), 'ignore_error': FieldInfo(annotation=bool, required=False, default=False), 'init_dataset': FieldInfo(annotation=List[str], required=False, default=[]), 'init_from_previous': FieldInfo(annotation=bool, required=False, default=False), 'input_modifier_fn': FieldInfo(annotation=Union[str, NoneType], required=False, default=None), 'input_template': FieldInfo(annotation=dict, required=False, default={}), 'isolate_outliers': FieldInfo(annotation=bool, required=False, default=False), 'model_num': FieldInfo(annotation=int, required=False, default=4), 'outlier_f_cutoff': FieldInfo(annotation=float, required=False, default=10.0), 'outlier_weight': FieldInfo(annotation=float, required=False, default=0.003), 'pretrained_model': FieldInfo(annotation=Union[str, NoneType], required=False, default=None), 'train_dw': FieldInfo(annotation=Union[CllDeepmdInputConfig.DwTraningConfig, NoneType], required=False, default=None)}#

Metadata about the fields defined on the model, mapping of field names to [FieldInfo][pydantic.fields.FieldInfo].

This replaces Model.__fields__ from Pydantic V1.

model_num: int#

Total number of models to train.

outlier_f_cutoff: float#

The force-magnitude threshold used to decide whether a data point is an outlier.

outlier_weight: float#

The weight assigned to outlier data during training.

pretrained_model: Optional[str]#

Pretrained model used for fine-tuning.

train_dw: Optional[DwTraningConfig]#

Options for Deep Wannier model training; if None, the Deep Wannier model will not be trained.
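Since CllDeepmdInputConfig is a plain Pydantic model, a configuration dict can be validated against it directly. A minimal sketch with placeholder values (the dataset entries and template contents are illustrative, not defaults of this package):

from ai2_kit.domain.deepmd import CllDeepmdInputConfig

config = CllDeepmdInputConfig.model_validate({
    "model_num": 4,
    "init_dataset": ["my-init-dataset"],  # placeholder dataset keys
    "input_template": {"training": {"numb_steps": 400000}},
    "compress_model": True,
})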

class ai2_kit.domain.deepmd.GenericDeepmdOutput(models: List[ai2_kit.core.artifact.Artifact], dataset: List[ai2_kit.core.artifact.Artifact])[source]#

Bases: ICllTrainOutput

dataset: List[Artifact]#
get_mlp_models() List[Artifact][source]#
get_training_dataset() List[Artifact][source]#
models: List[Artifact]#
async ai2_kit.domain.deepmd.cll_deepmd(input: CllDeepmdInput, ctx: CllDeepmdContext)[source]#
ai2_kit.domain.deepmd.make_deepmd_dataset(dataset_dir: str, outlier_dir: str, raw_data_collection: List[ArtifactDict], isolate_outliers: bool, outlier_f_cutoff: float, type_map: List[str], deepmd_input_template: dict, group_by_formula: bool, mode: str, sel_type: Optional[List[int]], ignore_error: bool)[source]#
ai2_kit.domain.deepmd.make_deepmd_input(input_template: dict, type_map: List[str], train_systems: List[str], outlier_systems: List[str], validation_systems: List[str], isolate_outliers: bool, outlier_weight: float)[source]#
ai2_kit.domain.deepmd.make_deepmd_task_dirs(input_template: dict, model_num: int, type_map: List[str], train_systems: List[str], outlier_systems: List[str], validation_systems: List[str], isolate_outliers: bool, outlier_weight: float, dw_input_template: Optional[dict], base_dir: str, input_modifier_fn: Optional[str])[source]#
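To give a feel for how the lower-level helpers are wired together, a hedged sketch of calling make_deepmd_task_dirs directly with keyword arguments matching the signature above; in normal use this is driven by cll_deepmd, and all paths and template values below are placeholders:

from ai2_kit.domain.deepmd import make_deepmd_task_dirs

make_deepmd_task_dirs(
    input_template={"training": {"numb_steps": 400000}},  # illustrative
    model_num=4,
    type_map=["O", "H"],
    train_systems=["./dataset/train-000"],  # placeholder system dirs
    outlier_systems=[],
    validation_systems=[],
    isolate_outliers=False,
    outlier_weight=0.003,
    dw_input_template=None,
    base_dir="./train-tasks",
    input_modifier_fn=None,
)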