aiida.engine#

Package Contents#

Classes#

Functions#

Data#

External#

ProcessState

if_

return_

while_

API#

class aiida.engine.AiiDAPersister#

Bases: plumpy.persistence.Persister

save_checkpoint(process: aiida.engine.processes.process.Process, tag: Optional[str] = None)#
load_checkpoint(pid: Hashable, tag: Optional[str] = None) → plumpy.persistence.Bundle#
get_checkpoints()#
get_process_checkpoints(pid: Hashable)#
delete_checkpoint(pid: Hashable, tag: Optional[str] = None) → None#
delete_process_checkpoints(pid: Hashable)#
class aiida.engine.Awaitable#

Bases: plumpy.utils.AttributesDict

class aiida.engine.AwaitableAction(*args, **kwds)#

Bases: enum.Enum

Initialization

ASSIGN = 'assign'#
APPEND = 'append'#
class aiida.engine.AwaitableTarget(*args, **kwds)#

Bases: enum.Enum

Initialization

PROCESS = 'process'#
class aiida.engine.BaseRestartWorkChain(*args, **kwargs)#

Bases: aiida.engine.processes.workchains.workchain.WorkChain

Initialization

_process_class: Optional[Type[aiida.engine.processes.Process]] = None#
_considered_handlers_extra = 'considered_handlers'#
property process_class: Type[aiida.engine.processes.process.Process]#
classmethod define(spec: aiida.engine.processes.ProcessSpec) → None#
setup() → None#
should_run_process() → bool#
run_process() → aiida.engine.processes.workchains.context.ToContext#
inspect_process() → Optional[aiida.engine.processes.ExitCode]#
get_outputs(node) → Mapping[str, aiida.orm.Node]#
results() → Optional[aiida.engine.processes.ExitCode]#
classmethod is_process_handler(process_handler_name: Union[str, types.FunctionType]) → bool#
classmethod get_process_handlers() → List[types.FunctionType]#
get_process_handlers_by_priority() → List[Tuple[int, types.FunctionType]]#
on_terminated()#
_wrap_bare_dict_inputs(port_namespace: aiida.engine.processes.PortNamespace, inputs: Dict[str, Any]) → aiida.common.AttributeDict#
class aiida.engine.CalcJob(*args, **kwargs)#

Bases: aiida.engine.processes.process.Process

Initialization

_node_class = None#
_spec_class = None#
classmethod define(spec: aiida.engine.processes.process_spec.CalcJobProcessSpec) → None#
spec_options()#
classmethod get_importer(entry_point_name: str | None = None) → aiida.engine.processes.calcjobs.importer.CalcJobImporter#
property options: aiida.common.AttributeDict#
classmethod get_state_classes() → Dict[Hashable, Type[plumpy.process_states.State]]#
property node: aiida.orm.CalcJobNode#
on_terminated() → None#
run() → Union[plumpy.process_states.Stop, int, plumpy.process_states.Wait]#
abstract prepare_for_submission(folder: aiida.common.folders.Folder) → aiida.common.datastructures.CalcInfo#
_setup_metadata(metadata: dict) → None#
_setup_inputs() → None#
_perform_dry_run()#
_perform_import()#
parse(retrieved_temporary_folder: Optional[str] = None, existing_exit_code: aiida.engine.processes.exit_code.ExitCode | None = None) → aiida.engine.processes.exit_code.ExitCode#
static terminate(exit_code: aiida.engine.processes.exit_code.ExitCode) → aiida.engine.processes.exit_code.ExitCode#
parse_scheduler_output(retrieved: aiida.orm.Node) → Optional[aiida.engine.processes.exit_code.ExitCode]#
parse_retrieved_output(retrieved_temporary_folder: Optional[str] = None) → Optional[aiida.engine.processes.exit_code.ExitCode]#
presubmit(folder: aiida.common.folders.Folder) → aiida.common.datastructures.CalcInfo#
class aiida.engine.CalcJobImporter#

Bases: abc.ABC

abstract static parse_remote_data(remote_data: aiida.orm.RemoteData, **kwargs) → Dict[str, Union[aiida.orm.Node, Dict]]#
class aiida.engine.CalcJobOutputPort(*args, **kwargs)#

Bases: plumpy.ports.OutputPort

Initialization

property pass_to_parser: bool#
class aiida.engine.CalcJobProcessSpec#

Bases: aiida.engine.processes.process_spec.ProcessSpec

Initialization

OUTPUT_PORT_TYPE = None#
property default_output_node: Optional[str]#
class aiida.engine.DaemonClient(profile: aiida.manage.configuration.profile.Profile)#

Initialization

DAEMON_ERROR_NOT_RUNNING = 'daemon-error-not-running'#
DAEMON_ERROR_TIMEOUT = 'daemon-error-timeout'#
_DAEMON_NAME = 'aiida-{name}'#
_ENDPOINT_PROTOCOL = None#
property profile: aiida.manage.configuration.profile.Profile#
property daemon_name: str#
property _verdi_bin: str#
cmd_start_daemon(number_workers: int = 1, foreground: bool = False) → list[str]#
property cmd_start_daemon_worker: list[str]#
property loglevel: str#
property virtualenv: str | None#
property circus_log_file: str#
property circus_pid_file: str#
property circus_port_file: str#
property circus_socket_file: str#
property circus_socket_endpoints: dict[str, str]#
property daemon_log_file: str#
property daemon_pid_file: str#
get_circus_port() → int#
static get_env() → dict[str, str]#
get_circus_socket_directory() → str#
get_daemon_pid() → int | None#
property is_daemon_running: bool#
delete_circus_socket_directory() → None#
classmethod get_available_port()#
get_controller_endpoint()#
get_pubsub_endpoint()#
get_stats_endpoint()#
get_ipc_endpoint(endpoint)#
get_tcp_endpoint(port=None)#
get_client() → circus.client.CircusClient#
call_client(command: aiida.engine.daemon.client.JsonDictType) → aiida.engine.daemon.client.JsonDictType#
get_status() → aiida.engine.daemon.client.JsonDictType#
get_numprocesses() → aiida.engine.daemon.client.JsonDictType#
get_worker_info() → aiida.engine.daemon.client.JsonDictType#
get_daemon_info() → aiida.engine.daemon.client.JsonDictType#
increase_workers(number: int) → aiida.engine.daemon.client.JsonDictType#
decrease_workers(number: int) → aiida.engine.daemon.client.JsonDictType#
stop_daemon(wait: bool = True, timeout: int = 5) → aiida.engine.daemon.client.JsonDictType#
restart_daemon(wait: bool) → aiida.engine.daemon.client.JsonDictType#
start_daemon(number_workers: int = 1, foreground: bool = False, timeout: int = 5) → None#
static _await_condition(condition: Callable, exception: Exception, timeout: int = 5, interval: float = 0.1)#
_start_daemon(number_workers: int = 1, foreground: bool = False) → None#
class aiida.engine.ExitCode#

Bases: typing.NamedTuple

status: int = 0#
message: Optional[str] = None#
invalidates_cache: bool = False#
format(**kwargs: str) → aiida.engine.processes.exit_code.ExitCode#
class aiida.engine.ExitCodesNamespace(dictionary=None)#

Bases: aiida.common.extendeddicts.AttributeDict

Initialization

__call__(identifier: Union[int, str]) → aiida.engine.processes.exit_code.ExitCode#
class aiida.engine.FunctionProcess(*args, **kwargs)#

Bases: aiida.engine.processes.process.Process

Initialization

_func_args: Sequence[str] = ()#
static _func(*_args, **_kwargs) → dict#
static build(func: Callable[..., Any], node_class: Type[aiida.orm.ProcessNode]) → Type[aiida.engine.processes.functions.FunctionProcess]#
classmethod validate_inputs(*args: Any, **kwargs: Any) → None#
classmethod create_inputs(*args: Any, **kwargs: Any) → Dict[str, Any]#
classmethod args_to_dict(*args: Any) → Dict[str, Any]#
classmethod get_or_create_db_record() → aiida.orm.ProcessNode#
property process_class: Callable[..., Any]#
execute() → Optional[Dict[str, Any]]#
_setup_db_record() → None#
run() → Optional[aiida.engine.processes.exit_code.ExitCode]#
class aiida.engine.InputPort(*args, **kwargs)#

Bases: aiida.engine.processes.ports.WithSerialize, aiida.engine.processes.ports.WithNonDb, plumpy.ports.InputPort

Initialization

get_description() → Dict[str, str]#
class aiida.engine.InterruptableFuture(*, loop=None)#

Bases: asyncio.Future

Initialization

interrupt(reason: Exception) → None#
async with_interrupt(coro: Awaitable[Any]) → Any#
class aiida.engine.JobManager(transport_queue: aiida.engine.transports.TransportQueue)#

Initialization

get_jobs_list(authinfo: aiida.orm.AuthInfo) → aiida.engine.processes.calcjobs.manager.JobsList#
request_job_info_update(authinfo: aiida.orm.AuthInfo, job_id: Hashable) → Iterator[asyncio.Future[JobInfo]]#
class aiida.engine.JobsList(authinfo: aiida.orm.AuthInfo, transport_queue: aiida.engine.transports.TransportQueue, last_updated: Optional[float] = None)#

Initialization

property logger: logging.Logger#
get_minimum_update_interval() → float#
property last_updated: Optional[float]#
async _get_jobs_from_scheduler() → Dict[Hashable, aiida.schedulers.datastructures.JobInfo]#
async _update_job_info() → None#
request_job_info_update(job_id: Hashable) → Iterator[asyncio.Future[JobInfo]]#
_ensure_updating() → None#
static _has_job_state_changed(old: Optional[aiida.schedulers.datastructures.JobInfo], new: Optional[aiida.schedulers.datastructures.JobInfo]) → bool#
_get_next_update_delay() → float#
_update_requests_outstanding() → bool#
_get_jobs_with_scheduler() → List[str]#
class aiida.engine.ObjectLoader#

Bases: plumpy.loaders.DefaultObjectLoader

load_object(identifier: str) → Any#
aiida.engine.OutputPort = None#
aiida.engine.PORT_NAMESPACE_SEPARATOR = '__'#
exception aiida.engine.PastException#

Bases: aiida.common.exceptions.AiidaException

Initialization

class aiida.engine.PortNamespace(*args, **kwargs)#

Bases: aiida.engine.processes.ports.WithNonDb, plumpy.ports.PortNamespace

Initialization

__setitem__(key: str, port: plumpy.ports.Port) → None#
static validate_port_name(port_name: str) → None#
serialize(mapping: Optional[Dict[str, Any]], breadcrumbs: Sequence[str] = ()) → Optional[Dict[str, Any]]#
class aiida.engine.Process(inputs: Optional[Dict[str, Any]] = None, logger: Optional[logging.Logger] = None, runner: Optional[aiida.engine.runners.Runner] = None, parent_pid: Optional[int] = None, enable_persistence: bool = True)#

Bases: plumpy.processes.Process

Initialization

_node_class = None#
_spec_class = None#
SINGLE_OUTPUT_LINKNAME: str = 'result'#
class SaveKeys(*args, **kwds)#

Bases: enum.Enum

Initialization

CALC_ID: str = 'calc_id'#
classmethod spec() → aiida.engine.processes.process_spec.ProcessSpec#
classmethod define(spec: aiida.engine.processes.process_spec.ProcessSpec) → None#
classmethod get_builder() → aiida.engine.processes.builder.ProcessBuilder#
classmethod get_or_create_db_record() → aiida.orm.ProcessNode#
init() → None#
classmethod get_exit_statuses(exit_code_labels: Iterable[str]) → List[int]#
exit_codes() → aiida.engine.processes.exit_code.ExitCodesNamespace#
spec_metadata() → aiida.engine.processes.ports.PortNamespace#
property node: aiida.orm.ProcessNode#
property uuid: str#
property metadata: aiida.common.extendeddicts.AttributeDict#
_save_checkpoint() → None#
save_instance_state(out_state: MutableMapping[str, Any], save_context: Optional[plumpy.persistence.LoadSaveContext]) → None#
get_provenance_inputs_iterator() → Iterator[Tuple[str, Union[aiida.engine.processes.ports.InputPort, aiida.engine.processes.ports.PortNamespace]]]#
load_instance_state(saved_state: MutableMapping[str, Any], load_context: plumpy.persistence.LoadSaveContext) → None#
kill(msg: Union[str, None] = None) → Union[bool, plumpy.futures.Future]#
out(output_port: str, value: Any = None) → None#
out_many(out_dict: Dict[str, Any]) → None#
on_create() → None#
on_entered(from_state: Optional[plumpy.process_states.State]) → None#
on_terminated() → None#
on_except(exc_info: Tuple[Any, Exception, types.TracebackType]) → None#
on_finish(result: Union[int, aiida.engine.processes.exit_code.ExitCode], successful: bool) → None#
on_paused(msg: Optional[str] = None) → None#
on_playing() → None#
on_output_emitting(output_port: str, value: Any) → None#
set_status(status: Optional[str]) → None#
submit(process: Type[aiida.engine.processes.process.Process], **kwargs) → aiida.orm.ProcessNode#
property runner: aiida.engine.runners.Runner#
get_parent_calc() → Optional[aiida.orm.ProcessNode]#
classmethod build_process_type() → str#
report(msg: str, *args, **kwargs) → None#
_create_and_setup_db_record() → Union[int, uuid.UUID]#
encode_input_args(inputs: Dict[str, Any]) → str#
decode_input_args(encoded: str) → Dict[str, Any]#
update_outputs() → None#
_build_process_label() → str#
_setup_db_record() → None#
_setup_version_info() → None#
_setup_metadata(metadata: dict) → None#
_setup_inputs() → None#
_flat_inputs() → Dict[str, Any]#
_flat_outputs() → Dict[str, Any]#
_flatten_inputs(port: Union[None, aiida.engine.processes.ports.InputPort, aiida.engine.processes.ports.PortNamespace], port_value: Any, parent_name: str = '', separator: str = PORT_NAMESPACE_SEPARATOR) → List[Tuple[str, Any]]#
_flatten_outputs(port: Union[None, aiida.engine.processes.ports.OutputPort, aiida.engine.processes.ports.PortNamespace], port_value: Any, parent_name: str = '', separator: str = PORT_NAMESPACE_SEPARATOR) → List[Tuple[str, Any]]#
exposed_inputs(process_class: Type[aiida.engine.processes.process.Process], namespace: Optional[str] = None, agglomerate: bool = True) → aiida.common.extendeddicts.AttributeDict#
exposed_outputs(node: aiida.orm.ProcessNode, process_class: Type[aiida.engine.processes.process.Process], namespace: Optional[str] = None, agglomerate: bool = True) → aiida.common.extendeddicts.AttributeDict#
static _get_namespace_list(namespace: Optional[str] = None, agglomerate: bool = True) → List[Optional[str]]#
classmethod is_valid_cache(node: aiida.orm.ProcessNode) → bool#
class aiida.engine.ProcessBuilder(process_class: Type[aiida.engine.processes.process.Process])#

Bases: aiida.engine.processes.builder.ProcessBuilderNamespace

Initialization

property process_class: Type[aiida.engine.processes.process.Process]#
_repr_pretty_(p, _) → str#
class aiida.engine.ProcessBuilderNamespace(port_namespace: aiida.engine.processes.ports.PortNamespace)#

Bases: collections.abc.MutableMapping

Initialization

__setattr__(attr: str, value: Any) → None#
__repr__()#
__dir__()#
__iter__()#
__len__()#
__getitem__(item)#
__setitem__(item, value)#
__delitem__(item)#
__delattr__(item)#
_recursive_merge(dictionary, key, value)#
_merge(*args, **kwds)#
_update(*args, **kwds)#
_inputs(prune: bool = False) → dict#
_prune(value)#
class aiida.engine.ProcessFuture(pk: int, loop: Optional[asyncio.AbstractEventLoop] = None, poll_interval: Union[None, int, float] = None, communicator: Optional[kiwipy.Communicator] = None)#

Bases: asyncio.Future

Initialization

_filtered = None#
cleanup() → None#
async _poll_process(node: aiida.orm.Node, poll_interval: Union[int, float]) → None#
class aiida.engine.ProcessHandlerReport#

Bases: typing.NamedTuple

do_break: bool = False#
exit_code: aiida.engine.processes.exit_code.ExitCode = None#
class aiida.engine.ProcessSpec#

Bases: plumpy.process_spec.ProcessSpec

Initialization

METADATA_KEY: str = 'metadata'#
METADATA_OPTIONS_KEY: str = 'options'#
INPUT_PORT_TYPE = None#
PORT_NAMESPACE_TYPE = None#
property metadata_key: str#
property options_key: str#
property exit_codes: aiida.engine.processes.exit_code.ExitCodesNamespace#
exit_code(status: int, label: str, message: str, invalidates_cache: bool = False) → None#
property ports: aiida.engine.processes.ports.PortNamespace#
property inputs: aiida.engine.processes.ports.PortNamespace#
property outputs: aiida.engine.processes.ports.PortNamespace#
class aiida.engine.Runner(poll_interval: Union[int, float] = 0, loop: Optional[asyncio.AbstractEventLoop] = None, communicator: Optional[kiwipy.Communicator] = None, rmq_submit: bool = False, persister: Optional[plumpy.persistence.Persister] = None)#

Initialization

_persister: Optional[plumpy.persistence.Persister] = None#
_communicator: Optional[kiwipy.Communicator] = None#
_controller: Optional[plumpy.process_comms.RemoteProcessThreadController] = None#
_closed: bool = False#
__enter__() → aiida.engine.runners.Runner#
__exit__(exc_type, exc_val, exc_tb)#
property loop: asyncio.AbstractEventLoop#
property transport: aiida.engine.transports.TransportQueue#
property persister: Optional[plumpy.persistence.Persister]#
property communicator: Optional[kiwipy.Communicator]#
property plugin_version_provider: aiida.plugins.utils.PluginVersionProvider#
property job_manager: aiida.engine.processes.calcjobs.manager.JobManager#
property controller: Optional[plumpy.process_comms.RemoteProcessThreadController]#
property is_daemon_runner: bool#
is_closed() → bool#
start() → None#
stop() → None#
run_until_complete(future: asyncio.Future) → Any#
close() → None#
instantiate_process(process: aiida.engine.runners.TYPE_RUN_PROCESS, **inputs)#
submit(process: aiida.engine.runners.TYPE_SUBMIT_PROCESS, **inputs: Any)#
schedule(process: aiida.engine.runners.TYPE_SUBMIT_PROCESS, *args: Any, **inputs: Any) → aiida.orm.ProcessNode#
_run(process: aiida.engine.runners.TYPE_RUN_PROCESS, *args: Any, **inputs: Any) → Tuple[Dict[str, Any], aiida.orm.ProcessNode]#
run(process: aiida.engine.runners.TYPE_RUN_PROCESS, *args: Any, **inputs: Any) → Dict[str, Any]#
run_get_node(process: aiida.engine.runners.TYPE_RUN_PROCESS, *args: Any, **inputs: Any) → aiida.engine.runners.ResultAndNode#
run_get_pk(process: aiida.engine.runners.TYPE_RUN_PROCESS, *args: Any, **inputs: Any) → aiida.engine.runners.ResultAndPk#
call_on_process_finish(pk: int, callback: Callable[[], Any]) → None#
get_process_future(pk: int) → aiida.engine.processes.futures.ProcessFuture#
_poll_process(node, callback)#
aiida.engine.ToContext = None#
class aiida.engine.WithNonDb(*args, **kwargs)#

Initialization

property non_db_explicitly_set: bool#
property non_db: bool#
class aiida.engine.WithSerialize(*args, **kwargs)#

Initialization

serialize(value: Any) → aiida.orm.Data#
class aiida.engine.WorkChain(inputs: dict | None = None, logger: logging.Logger | None = None, runner: aiida.engine.runners.Runner | None = None, enable_persistence: bool = True)#

Bases: aiida.engine.processes.process.Process

Initialization

_node_class = None#
_spec_class = None#
_STEPPER_STATE = 'stepper_state'#
_CONTEXT = 'CONTEXT'#
classmethod spec() → aiida.engine.processes.workchains.workchain.WorkChainSpec#
property node: aiida.orm.WorkChainNode#
property ctx: aiida.common.extendeddicts.AttributeDict#
save_instance_state(out_state, save_context)#
load_instance_state(saved_state, load_context)#
on_run()#
_resolve_nested_context(key: str) → tuple[aiida.common.extendeddicts.AttributeDict, str]#
_insert_awaitable(awaitable: aiida.engine.processes.workchains.awaitable.Awaitable) → None#
_resolve_awaitable(awaitable: aiida.engine.processes.workchains.awaitable.Awaitable, value: Any) → None#
to_context(**kwargs: aiida.engine.processes.workchains.awaitable.Awaitable | aiida.orm.ProcessNode) → None#
_update_process_status() → None#
run() → Any#
_do_step() → Any#
_store_nodes(data: Any) → None#
on_exiting() → None#
on_wait(awaitables: Sequence[aiida.engine.processes.workchains.awaitable.Awaitable])#
_action_awaitables() → None#
_on_awaitable_finished(awaitable: aiida.engine.processes.workchains.awaitable.Awaitable) → None#
aiida.engine.append_(target: Union[aiida.engine.processes.workchains.awaitable.Awaitable, aiida.orm.ProcessNode]) → aiida.engine.processes.workchains.awaitable.Awaitable#
aiida.engine.assign_(target: Union[aiida.engine.processes.workchains.awaitable.Awaitable, aiida.orm.ProcessNode]) → aiida.engine.processes.workchains.awaitable.Awaitable#
aiida.engine.calcfunction(function: aiida.engine.processes.functions.FunctionType) → aiida.engine.processes.functions.FunctionType#
aiida.engine.construct_awaitable(target: Union[aiida.engine.processes.workchains.awaitable.Awaitable, aiida.orm.ProcessNode]) → aiida.engine.processes.workchains.awaitable.Awaitable#
aiida.engine.get_object_loader() → aiida.engine.persistence.ObjectLoader#
aiida.engine.interruptable_task(coro: Callable[[aiida.engine.utils.InterruptableFuture], Awaitable[Any]], loop: Optional[asyncio.AbstractEventLoop] = None) → aiida.engine.utils.InterruptableFuture#
aiida.engine.is_process_function(function: Any) → bool#
aiida.engine.process_handler(wrapped: Optional[types.FunctionType] = None, *, priority: int = 0, exit_codes: Union[None, aiida.engine.processes.exit_code.ExitCode, List[aiida.engine.processes.exit_code.ExitCode]] = None, enabled: bool = True) → types.FunctionType#
aiida.engine.run(process: aiida.engine.launch.TYPE_RUN_PROCESS, *args: Any, **inputs: Any) → Dict[str, Any]#
aiida.engine.run_get_node(process: aiida.engine.launch.TYPE_RUN_PROCESS, *args: Any, **inputs: Any) → Tuple[Dict[str, Any], aiida.orm.ProcessNode]#
aiida.engine.run_get_pk(process: aiida.engine.launch.TYPE_RUN_PROCESS, *args: Any, **inputs: Any) → Tuple[Dict[str, Any], int]#
aiida.engine.submit(process: aiida.engine.launch.TYPE_SUBMIT_PROCESS, **inputs: Any) → aiida.orm.ProcessNode#
aiida.engine.workfunction(function: aiida.engine.processes.functions.FunctionType) → aiida.engine.processes.functions.FunctionType#