Start of Node namespace reorganization #5472

Merged 4 commits on Apr 6, 2022
1 change: 1 addition & 0 deletions .github/workflows/ci-code.yml
@@ -120,6 +120,7 @@ jobs:

- name: Run test suite
env:
AIIDA_WARN_v3: 1
SQLALCHEMY_WARN_20: 1
run:
.github/workflows/tests.sh
1 change: 1 addition & 0 deletions .github/workflows/test-install.yml
@@ -238,6 +238,7 @@ jobs:

- name: Run test suite
env:
AIIDA_WARN_v3: 1
SQLALCHEMY_WARN_20: 1
run:
.github/workflows/tests.sh
1 change: 1 addition & 0 deletions .pre-commit-config.yaml
@@ -97,6 +97,7 @@ repos:
aiida/orm/nodes/data/jsonable.py|
aiida/orm/nodes/node.py|
aiida/orm/nodes/process/.*py|
aiida/orm/nodes/repository.py|
aiida/orm/utils/links.py|
aiida/plugins/entry_point.py|
aiida/plugins/factories.py|
4 changes: 2 additions & 2 deletions aiida/cmdline/commands/cmd_calcjob.py
@@ -111,7 +111,7 @@ def calcjob_inputcat(calcjob, path):

try:
# When we `cat`, it makes sense to directly send the output to stdout as it is
with calcjob.open(path, mode='rb') as fhandle:
with calcjob.base.repository.open(path, mode='rb') as fhandle:
copyfileobj(fhandle, sys.stdout.buffer)
except OSError as exception:
# The special case is a broken pipe error, which is usually OK.
@@ -163,7 +163,7 @@ def calcjob_outputcat(calcjob, path):

try:
# When we `cat`, it makes sense to directly send the output to stdout as it is
with retrieved.open(path, mode='rb') as fhandle:
with retrieved.base.repository.open(path, mode='rb') as fhandle:
copyfileobj(fhandle, sys.stdout.buffer)
except OSError as exception:
# The special case is a broken pipe error, which is usually OK.
6 changes: 3 additions & 3 deletions aiida/cmdline/commands/cmd_node.py
@@ -44,7 +44,7 @@ def repo_cat(node, relative_path):
import sys

try:
with node.open(relative_path, mode='rb') as fhandle:
with node.base.repository.open(relative_path, mode='rb') as fhandle:
copyfileobj(fhandle, sys.stdout.buffer)
except OSError as exception:
# The special case is a broken pipe error, which is usually OK.
@@ -96,7 +96,7 @@ def _copy_tree(key, output_dir): # pylint: disable=too-many-branches
Recursively copy the content at the ``key`` path in the given node to
the ``output_dir``.
"""
for file in node.list_objects(key):
for file in node.base.repository.list_objects(key):
# Not using os.path.join here, because this is the "path"
# in the AiiDA node, not an actual OS - level path.
file_key = file.name if not key else f'{key}/{file.name}'
@@ -110,7 +110,7 @@ def _copy_tree(key, output_dir): # pylint: disable=too-many-branches
assert file.file_type == FileType.FILE
out_file_path = output_dir / file.name
assert not out_file_path.exists()
with node.open(file_key, 'rb') as in_file:
with node.base.repository.open(file_key, 'rb') as in_file:
with out_file_path.open('wb') as out_file:
shutil.copyfileobj(in_file, out_file)

2 changes: 1 addition & 1 deletion aiida/cmdline/utils/repository.py
@@ -20,6 +20,6 @@ def list_repository_contents(node, path, color):
"""
from aiida.repository import FileType

for entry in node.list_objects(path):
for entry in node.base.repository.list_objects(path):
bold = bool(entry.file_type == FileType.DIRECTORY)
echo.echo(entry.name, bold=bold, fg='blue' if color and entry.file_type == FileType.DIRECTORY else None)
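
The pattern in these cmdline hunks is the same throughout the PR: repository access moves from methods on the node itself to the `node.base.repository` namespace. A minimal read-side sketch of the new calls, assuming a stored node with some repository content (the pk and filename below are purely illustrative):

```python
from aiida import load_profile, orm
from aiida.repository import FileType

load_profile()

node = orm.load_node(1234)  # hypothetical pk of a node with files in its repository

# List objects at the repository root and mark directories
for entry in node.base.repository.list_objects():
    suffix = '/' if entry.file_type == FileType.DIRECTORY else ''
    print(f'{entry.name}{suffix}')

# Open a single file in binary mode, exactly as the cmdline commands above do
with node.base.repository.open('some_file.txt', mode='rb') as handle:
    content = handle.read()
```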
8 changes: 3 additions & 5 deletions aiida/common/json.py
@@ -17,13 +17,11 @@
"""
import codecs
import json
import warnings

from aiida.common.warnings import AiidaDeprecationWarning
from aiida.common.warnings import warn_deprecation

warnings.warn(
'This module has been deprecated and should no longer be used. Use the `json` standard library instead.',
AiidaDeprecationWarning
warn_deprecation(
'This module has been deprecated and should no longer be used. Use the `json` standard library instead.', version=3
)


16 changes: 16 additions & 0 deletions aiida/common/warnings.py
@@ -8,6 +8,8 @@
# For further information please visit http://www.aiida.net #
###########################################################################
"""Define warnings that can be thrown by AiiDA."""
import os
import warnings


class AiidaDeprecationWarning(Warning):
@@ -32,3 +34,17 @@ class AiidaTestWarning(Warning):
"""
Class for warnings concerning the AiiDA testing infrastructure.
"""


def warn_deprecation(message: str, version: int, stacklevel=2) -> None:
"""Warns about a deprecation for a future aiida-core version.

Warnings are activated if the `AIIDA_WARN_v{major}` environment variable is set to `True`.

:param message: the message to be printed
:param version: the major version number of the future version
:param stacklevel: the stack level at which the warning is issued
"""
if os.environ.get(f'AIIDA_WARN_v{version}'):
message = f'{message} (this will be removed in v{version})'
warnings.warn(message, AiidaDeprecationWarning, stacklevel=stacklevel)
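
A sketch of how this helper is intended to be used, and of what the `AIIDA_WARN_v3: 1` lines added to the CI workflows above switch on; the function `old_method` is hypothetical:

```python
import os
import warnings

from aiida.common.warnings import AiidaDeprecationWarning, warn_deprecation


def old_method():
    """Hypothetical function slated for removal in aiida-core v3."""
    warn_deprecation('`old_method` is deprecated, use `new_method` instead', version=3)


# With the environment variable unset, the call stays silent
os.environ.pop('AIIDA_WARN_v3', None)
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    old_method()
assert not caught

# With AIIDA_WARN_v3 set, as in the CI workflows, an AiidaDeprecationWarning is emitted
os.environ['AIIDA_WARN_v3'] = '1'
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    old_method()
assert issubclass(caught[0].category, AiidaDeprecationWarning)
assert 'this will be removed in v3' in str(caught[0].message)
```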
18 changes: 9 additions & 9 deletions aiida/engine/daemon/execmanager.py
@@ -175,12 +175,12 @@ def upload_calculation(
for code in input_codes:
if code.is_local():
# Note: this will possibly overwrite files
for filename in code.list_object_names():
for filename in code.base.repository.list_object_names():
# Note, once #2579 is implemented, use the `node.open` method instead of the named temporary file in
# combination with the new `Transport.put_object_from_filelike`
# Since the content of the node could potentially be binary, we read the raw bytes and pass them on
with NamedTemporaryFile(mode='wb+') as handle:
handle.write(code.get_object_content(filename, mode='rb'))
handle.write(code.base.repository.get_object_content(filename, mode='rb'))
handle.flush()
transport.put(handle.name, filename)
transport.chmod(code.get_local_executable(), 0o755) # rwxr-xr-x
@@ -212,14 +212,14 @@ def upload_calculation(
filepath_target = pathlib.Path(folder.abspath) / filename_target
filepath_target.parent.mkdir(parents=True, exist_ok=True)

if data_node.get_object(filename_source).file_type == FileType.DIRECTORY:
if data_node.base.repository.get_object(filename_source).file_type == FileType.DIRECTORY:
# If the source object is a directory, we copy its entire contents
data_node.copy_tree(filepath_target, filename_source)
provenance_exclude_list.extend(data_node.list_object_names(filename_source))
data_node.base.repository.copy_tree(filepath_target, filename_source)
provenance_exclude_list.extend(data_node.base.repository.list_object_names(filename_source))
else:
# Otherwise, simply copy the file
with folder.open(target, 'wb') as handle:
with data_node.open(filename, 'rb') as source:
with data_node.base.repository.open(filename, 'rb') as source:
shutil.copyfileobj(source, handle)

provenance_exclude_list.append(target)
@@ -320,12 +320,12 @@ def upload_calculation(
dirname not in provenance_exclude_list for dirname in dirnames
):
with open(filepath, 'rb') as handle: # type: ignore[assignment]
node._repository.put_object_from_filelike(handle, relpath) # pylint: disable=protected-access
node.base.repository._repository.put_object_from_filelike(handle, relpath) # pylint: disable=protected-access

# Since the node is already stored, we cannot use the normal repository interface since it will raise a
# `ModificationNotAllowed` error. To bypass it, we go straight to the underlying repository instance to store the
# files, however, this means we have to manually update the node's repository metadata.
node._update_repository_metadata() # pylint: disable=protected-access
node.base.repository._update_repository_metadata() # pylint: disable=protected-access

if not dry_run:
# Make sure that attaching the `remote_folder` with a link is the last thing we do. This gives the biggest
@@ -465,7 +465,7 @@ def retrieve_calculation(calculation: CalcJobNode, transport: Transport, retriev
with SandboxFolder() as folder:
retrieve_files_from_list(calculation, transport, folder.abspath, retrieve_list)
# Here I retrieved everything; now I store them inside the calculation
retrieved_files.put_object_from_tree(folder.abspath)
retrieved_files.base.repository.put_object_from_tree(folder.abspath)

# Retrieve the temporary files in the retrieved_temporary_folder if any files were
# specified in the 'retrieve_temporary_list' key
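
The write side follows the same namespace: the engine now fills an unstored node's repository through `node.base.repository`. A minimal sketch outside the engine, with an illustrative directory path:

```python
import io

from aiida import load_profile, orm

load_profile()

node = orm.FolderData()  # any unstored node; FolderData is just a convenient choice

# Copy a whole directory tree into the node's repository (the path is illustrative)
node.base.repository.put_object_from_tree('/tmp/retrieved_output')

# Or write a single file-like object to a relative path inside the repository
node.base.repository.put_object_from_filelike(io.BytesIO(b'hello'), 'greeting.txt')

node.store()  # after storing, normal repository writes raise ModificationNotAllowed
```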
4 changes: 2 additions & 2 deletions aiida/engine/processes/calcjobs/calcjob.py
@@ -519,7 +519,7 @@ def parse_scheduler_output(self, retrieved: orm.Node) -> Optional[ExitCode]:
self.logger.warning('could not determine `stderr` filename because `scheduler_stderr` option was not set.')
else:
try:
scheduler_stderr = retrieved.get_object_content(filename_stderr)
scheduler_stderr = retrieved.base.repository.get_object_content(filename_stderr)
except FileNotFoundError:
scheduler_stderr = None
self.logger.warning(f'could not parse scheduler output: the `{filename_stderr}` file is missing')
@@ -528,7 +528,7 @@ def parse_scheduler_output(self, retrieved: orm.Node) -> Optional[ExitCode]:
self.logger.warning('could not determine `stdout` filename because `scheduler_stdout` option was not set.')
else:
try:
scheduler_stdout = retrieved.get_object_content(filename_stdout)
scheduler_stdout = retrieved.base.repository.get_object_content(filename_stdout)
except FileNotFoundError:
scheduler_stdout = None
self.logger.warning(f'could not parse scheduler output: the `{filename_stdout}` file is missing')
7 changes: 4 additions & 3 deletions aiida/manage/configuration/__init__.py
@@ -54,7 +54,7 @@
from typing import TYPE_CHECKING, Any, Optional
import warnings

from aiida.common.warnings import AiidaDeprecationWarning
from aiida.common.warnings import AiidaDeprecationWarning, warn_deprecation

if TYPE_CHECKING:
from aiida.manage.configuration import Config, Profile # pylint: disable=import-self
@@ -114,9 +114,10 @@ def _merge_deprecated_cache_yaml(config, filepath):
while not cache_path_backup or os.path.isfile(cache_path_backup):
cache_path_backup = f"{cache_path}.{timezone.now().strftime('%Y%m%d-%H%M%S.%f')}"

warnings.warn(
warn_deprecation(
'cache_config.yml use is deprecated and support will be removed in `v3.0`. Merging into config.json and '
f'moving to: {cache_path_backup}', AiidaDeprecationWarning
f'moving to: {cache_path_backup}',
version=3
)
import yaml
with open(cache_path, 'r', encoding='utf8') as handle:
5 changes: 2 additions & 3 deletions aiida/manage/manager.py
@@ -12,7 +12,6 @@
import asyncio
import functools
from typing import TYPE_CHECKING, Any, Optional, Union
from warnings import warn

if TYPE_CHECKING:
from kiwipy.rmq import RmqThreadCommunicator
@@ -206,8 +205,8 @@ def get_backend(self) -> 'StorageBackend':

Deprecated: use `get_profile_storage` instead.
"""
from aiida.common.warnings import AiidaDeprecationWarning
warn('get_backend() is deprecated, use get_profile_storage() instead', AiidaDeprecationWarning)
from aiida.common.warnings import warn_deprecation
warn_deprecation('get_backend() is deprecated, use get_profile_storage() instead', version=3)
return self.get_profile_storage()

def get_profile_storage(self) -> 'StorageBackend':
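
For callers, the non-deprecated path is the one `get_backend` now forwards to; a minimal sketch, assuming a configured profile:

```python
from aiida import load_profile
from aiida.manage import get_manager

load_profile()

# get_backend() now only emits the deprecation warning and forwards here
storage = get_manager().get_profile_storage()
print(type(storage).__name__)
```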
5 changes: 2 additions & 3 deletions aiida/manage/tests/main.py
@@ -15,9 +15,8 @@
import os
import shutil
import tempfile
import warnings

from aiida.common.warnings import AiidaDeprecationWarning
from aiida.common.warnings import warn_deprecation
from aiida.manage import configuration, get_manager
from aiida.manage.configuration.settings import create_instance_directories
from aiida.manage.external.postgres import Postgres
@@ -123,7 +122,7 @@ def has_profile_open(self):
return self._manager and self._manager.has_profile_open()

def reset_db(self):
warnings.warn('reset_db() is deprecated, use clear_profile() instead', AiidaDeprecationWarning)
warn_deprecation('reset_db() is deprecated, use clear_profile() instead', version=3)
return self._manager.clear_profile()

def clear_profile(self):
16 changes: 5 additions & 11 deletions aiida/manage/tests/pytest_fixtures.py
@@ -21,12 +21,11 @@
import asyncio
import shutil
import tempfile
import warnings

import pytest

from aiida.common.log import AIIDA_LOGGER
from aiida.common.warnings import AiidaDeprecationWarning
from aiida.common.warnings import warn_deprecation
from aiida.manage.tests import get_test_backend_name, get_test_profile_name, test_manager


@@ -77,29 +76,24 @@ def clear_database(clear_database_after_test):
@pytest.fixture(scope='function')
def clear_database_after_test(aiida_profile):
"""Clear the database after the test."""
warnings.warn(
'the clear_database_after_test fixture is deprecated, use aiida_profile_clean instead', AiidaDeprecationWarning
)
warn_deprecation('the clear_database_after_test fixture is deprecated, use aiida_profile_clean instead', version=3)
yield aiida_profile
aiida_profile.clear_profile()


@pytest.fixture(scope='function')
def clear_database_before_test(aiida_profile):
"""Clear the database before the test."""
warnings.warn(
'the clear_database_before_test fixture deprecated, use aiida_profile_clean instead', AiidaDeprecationWarning
)
warn_deprecation('the clear_database_before_test fixture is deprecated, use aiida_profile_clean instead', version=3)
aiida_profile.clear_profile()
yield aiida_profile


@pytest.fixture(scope='class')
def clear_database_before_test_class(aiida_profile):
"""Clear the database before a test class."""
warnings.warn(
'the clear_database_before_test_class is deprecated, use aiida_profile_clean_class instead',
AiidaDeprecationWarning
warn_deprecation(
'the clear_database_before_test_class is deprecated, use aiida_profile_clean_class instead', version=3
)
aiida_profile.clear_profile()
yield
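
The replacement fixtures named in these deprecation messages can be used directly. A hedged sketch of a test module, assuming the aiida-core pytest fixtures are registered (e.g. via `pytest_plugins` in the root `conftest.py`); the test bodies are illustrative:

```python
# In the root conftest.py the fixtures would be registered with:
#     pytest_plugins = ['aiida.manage.tests.pytest_fixtures']
import pytest

from aiida import orm


def test_with_clean_profile(aiida_profile_clean):
    """The fixture resets the profile storage so the test starts from a clean slate."""
    orm.Int(5).store()
    assert orm.QueryBuilder().append(orm.Int).count() == 1


@pytest.mark.usefixtures('aiida_profile_clean_class')
class TestGrouped:
    """Here the profile is reset once for the whole class rather than per test."""

    def test_store(self):
        orm.Int(7).store()
```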
2 changes: 1 addition & 1 deletion aiida/orm/__init__.py
@@ -72,7 +72,7 @@
'Node',
'NodeEntityLoader',
'NodeLinksManager',
'NodeRepositoryMixin',
'NodeRepository',
'NumericType',
'OrbitalData',
'OrderSpecifier',
2 changes: 1 addition & 1 deletion aiida/orm/implementation/storage_backend.py
@@ -40,7 +40,7 @@ class StorageBackend(abc.ABC): # pylint: disable=too-many-public-methods
- Searchable data, which is stored in the database and can be queried using the QueryBuilder
- Non-searchable (binary) data, which is stored in the repository and can be loaded using the RepositoryBackend

The two sources are inter-linked by the ``Node.repository_metadata``.
The two sources are inter-linked by the ``Node.base.repository.metadata``.
Once stored, the leaf values of this dictionary must be valid pointers to object keys in the repository.

The class methods, `version_profile` and `migrate`,
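
A hedged illustration of that link between the two sources, assuming a configured profile: after a node is stored, `node.base.repository.metadata` is a nested mapping whose leaf values reference object keys in the repository (the exact shape shown in the comment below is illustrative):

```python
import io

from aiida import load_profile, orm

load_profile()

node = orm.FolderData()
node.base.repository.put_object_from_filelike(io.BytesIO(b'some content'), 'data.txt')
node.store()

# Searchable side: a nested dict whose leaf values point at objects in the repository,
# roughly of the form {'o': {'data.txt': {'k': '<object key>'}}} (shape illustrative)
print(node.base.repository.metadata)

# Non-searchable side: the bytes themselves, read back through the same namespace
assert node.base.repository.get_object_content('data.txt', mode='rb') == b'some content'
```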
2 changes: 1 addition & 1 deletion aiida/orm/nodes/__init__.py
@@ -40,7 +40,7 @@
'KpointsData',
'List',
'Node',
'NodeRepositoryMixin',
'NodeRepository',
'NumericType',
'OrbitalData',
'ProcessNode',
12 changes: 6 additions & 6 deletions aiida/orm/nodes/data/array/array.py
@@ -45,11 +45,11 @@ def delete_array(self, name):
:param name: The name of the array to delete from the node.
"""
fname = f'{name}.npy'
if fname not in self.list_object_names():
if fname not in self.base.repository.list_object_names():
raise KeyError(f"Array with name '{name}' not found in node pk= {self.pk}")

# remove both file and attribute
self.delete_object(fname)
self.base.repository.delete_object(fname)
try:
self.delete_attribute(f'{self.array_prefix}{name}')
except (KeyError, AttributeError):
@@ -71,7 +71,7 @@ def _arraynames_from_files(self):
Return a list of all arrays stored in the node, listing the files (and
not relying on the properties).
"""
return [i[:-4] for i in self.list_object_names() if i.endswith('.npy')]
return [i[:-4] for i in self.base.repository.list_object_names() if i.endswith('.npy')]

def _arraynames_from_properties(self):
"""
@@ -111,11 +111,11 @@ def get_array_from_file(self, name):
"""Return the array stored in a .npy file"""
filename = f'{name}.npy'

if filename not in self.list_object_names():
if filename not in self.base.repository.list_object_names():
raise KeyError(f'Array with name `{name}` not found in ArrayData<{self.pk}>')

# Open a handle in binary read mode as the arrays are written as binary files as well
with self.open(filename, mode='rb') as handle:
with self.base.repository.open(filename, mode='rb') as handle:
return numpy.load(handle, allow_pickle=False) # pylint: disable=unexpected-keyword-arg

# Return with proper caching if the node is stored, otherwise always re-read from disk
@@ -171,7 +171,7 @@ def set_array(self, name, array):
handle.seek(0)

# Write the numpy array to the repository, keeping the byte representation
self.put_object_from_filelike(handle, f'{name}.npy')
self.base.repository.put_object_from_filelike(handle, f'{name}.npy')

# Store the array name and shape for querying purposes
self.set_attribute(f'{self.array_prefix}{name}', list(array.shape))
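
The public `ArrayData` interface is unchanged by this hunk; only its internal file access goes through the new namespace. A small usage sketch, assuming a configured profile:

```python
import numpy

from aiida import load_profile, orm

load_profile()

array_node = orm.ArrayData()
array_node.set_array('positions', numpy.arange(6).reshape(2, 3))

# Internally set_array wrote `positions.npy` into the node's repository ...
assert 'positions.npy' in array_node.base.repository.list_object_names()

# ... and reading it back goes through the same repository namespace
numpy.testing.assert_array_equal(
    array_node.get_array('positions'), numpy.arange(6).reshape(2, 3)
)
```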