From 9335cde352eee4ca7f81676149ac70ef09662fd3 Mon Sep 17 00:00:00 2001 From: Fernando Gargiulo Date: Mon, 21 Nov 2016 19:12:08 +0100 Subject: [PATCH 1/5] Separated nwchem tests. Inverted the inheritance order of the derived class TestDataNodeDjango. --- aiida/backends/djsite/db/subtests/nodes.py | 2 +- aiida/backends/djsite/db/subtests/nwchem.py | 147 ++---------------- .../backends/sqlalchemy/tests/test_runner.py | 5 +- aiida/backends/tests/nwchem.py | 47 +++--- optional_requirements.txt | 2 +- 5 files changed, 39 insertions(+), 164 deletions(-) diff --git a/aiida/backends/djsite/db/subtests/nodes.py b/aiida/backends/djsite/db/subtests/nodes.py index cdd831a8b6..6b458e49a3 100644 --- a/aiida/backends/djsite/db/subtests/nodes.py +++ b/aiida/backends/djsite/db/subtests/nodes.py @@ -15,7 +15,7 @@ __authors__ = "The AiiDA team." -class TestDataNodeDjango(AiidaTestCase, TestDataNode): +class TestDataNodeDjango(TestDataNode, AiidaTestCase, ): """ These tests check the features of Data nodes that differ from the base Node """ diff --git a/aiida/backends/djsite/db/subtests/nwchem.py b/aiida/backends/djsite/db/subtests/nwchem.py index 31bd21b4e3..2c2ce874c8 100644 --- a/aiida/backends/djsite/db/subtests/nwchem.py +++ b/aiida/backends/djsite/db/subtests/nwchem.py @@ -2,150 +2,21 @@ """ Tests for the NWChem input plugins. """ -import os -import tempfile - -from aiida.backends.djsite.db.testbase import AiidaTestCase -from aiida.orm.calculation.job.nwchem.nwcpymatgen import _prepare_pymatgen_dict -from aiida.orm.data.structure import has_ase, has_pymatgen, StructureData -from aiida.orm.data.cif import has_pycifrw -from django.utils import unittest __copyright__ = u"Copyright (c), This file is part of the AiiDA platform. For further information please visit http://www.aiida.net/. All rights reserved." __license__ = "MIT license, see LICENSE.txt file." __version__ = "0.7.0" __authors__ = "The AiiDA team." -class TestNwchem(AiidaTestCase): - - @unittest.skipIf((not has_ase()) or (not has_pymatgen()), - "Unable to import ASE and pymatgen") - def test_1(self): - from ase import Atoms - - par = { - 'directives': [ - ['set nwpw:minimizer', '2'], - ['set nwpw:psi_nolattice', '.true.'], - ['set includestress', '.true.'] - ], - 'geometry_options': [ - 'units', - 'au', - 'center', - 'noautosym', - 'noautoz', - 'print' - ], - 'memory_options': [], - 'symmetry_options': [], - 'tasks': [ - { - 'alternate_directives': { - 'driver': {'clear': '', 'maxiter': 40}, - 'nwpw': {'ewald_ncut': 8, 'simulation_cell': '\n ngrid 16 16 16\n end'} - }, - 'basis_set': {}, - 'basis_set_option': 'cartesian', - 'charge': 0, - 'operation': 'optimize', - 'spin_multiplicity': None, - 'theory': 'pspw', - 'theory_directives': {}, - 'title': None - } - ] - } - - a = Atoms(['Si', 'Si', 'Si' ,'Si', 'C', 'C', 'C', 'C'], - cell=[8.277, 8.277, 8.277]) - a.set_scaled_positions([ - (-0.5, -0.5, -0.5), - (0.0, 0.0, -0.5), - (0.0, -0.5, 0.0), - (-0.5, 0.0, 0.0), - (-0.25, -0.25, -0.25), - (0.25 ,0.25 ,-0.25), - (0.25, -0.25, 0.25), - (-0.25 ,0.25 ,0.25), - ]) - s = StructureData(ase=a) - - ## Test 1 - # Input file string prodiced by pymatgen - app = _prepare_pymatgen_dict(par, s) - # Target input file - target_string = '''set nwpw:minimizer 2 -set nwpw:psi_nolattice .true. -set includestress .true. 
-geometry units au center noautosym noautoz print - Si -4.1385 -4.1385 -4.1385 - Si 0.0 0.0 -4.1385 - Si 0.0 -4.1385 0.0 - Si -4.1385 0.0 0.0 - C -2.06925 -2.06925 -2.06925 - C 2.06925 2.06925 -2.06925 - C 2.06925 -2.06925 2.06925 - C -2.06925 2.06925 2.06925 -end - -title "pspw optimize" -charge 0 -basis cartesian - -end -driver - clear \n maxiter 40 -end -nwpw - ewald_ncut 8 - simulation_cell \n ngrid 16 16 16 - end -end -task pspw optimize -''' - self.assertEquals(app, target_string) - - ## Test 2 - par['add_cell'] = True +#import the generic test class for nwchem +from aiida.backends.djsite.db.testbase import AiidaTestCase +from aiida.backends.tests.nwchem import TestNwchem - # Input file string prodiced by pymatgen - app = _prepare_pymatgen_dict(par, s) - # Target input file - target_string = '''set nwpw:minimizer 2 -set nwpw:psi_nolattice .true. -set includestress .true. -geometry units au center noautosym noautoz print \n system crystal - lat_a 8.277 - lat_b 8.277 - lat_c 8.277 - alpha 90.0 - beta 90.0 - gamma 90.0 - end - Si -0.5 -0.5 -0.5 - Si 0.0 0.0 -0.5 - Si 0.0 -0.5 0.0 - Si -0.5 0.0 0.0 - C -0.25 -0.25 -0.25 - C 0.25 0.25 -0.25 - C 0.25 -0.25 0.25 - C -0.25 0.25 0.25 -end -title "pspw optimize" -charge 0 -basis cartesian +class TestNwchemDjango(TestNwchem, AiidaTestCase): + """ + These tests check the features of nwchem input file generator that differ + from the base Nwchem test + """ + pass -end -driver - clear \n maxiter 40 -end -nwpw - ewald_ncut 8 - simulation_cell \n ngrid 16 16 16 - end -end -task pspw optimize -''' - self.assertEquals(app, target_string) diff --git a/aiida/backends/sqlalchemy/tests/test_runner.py b/aiida/backends/sqlalchemy/tests/test_runner.py index 1af31332f8..ad08be0e64 100644 --- a/aiida/backends/sqlalchemy/tests/test_runner.py +++ b/aiida/backends/sqlalchemy/tests/test_runner.py @@ -21,8 +21,9 @@ def find_classes(module_str): def run_tests(): modules_str = [ - "aiida.backends.sqlalchemy.tests.nodes", - "aiida.backends.sqlalchemy.tests.backup_script", +# "aiida.backends.sqlalchemy.tests.nodes", +# "aiida.backends.sqlalchemy.tests.backup_script", + "aiida.backends.sqlalchemy.tests.nwchem" ] for module_str in modules_str: # Dynamically importing the module that interests us diff --git a/aiida/backends/tests/nwchem.py b/aiida/backends/tests/nwchem.py index cb169bdab9..d7467772dc 100644 --- a/aiida/backends/tests/nwchem.py +++ b/aiida/backends/tests/nwchem.py @@ -2,20 +2,19 @@ """ Tests for the NWChem input plugins. """ -import os -import tempfile from aiida.backends.djsite.db.testbase import AiidaTestCase from aiida.orm.calculation.job.nwchem.nwcpymatgen import _prepare_pymatgen_dict from aiida.orm.data.structure import has_ase, has_pymatgen, StructureData -from aiida.orm.data.cif import has_pycifrw -from django.utils import unittest +import unittest __copyright__ = u"Copyright (c), This file is part of the AiiDA platform. For further information please visit http://www.aiida.net/. All rights reserved." __license__ = "MIT license, see LICENSE.txt file." __version__ = "0.7.0" __authors__ = "The AiiDA team." 
+ + class TestNwchem(AiidaTestCase): @unittest.skipIf((not has_ase()) or (not has_pymatgen()), @@ -46,6 +45,7 @@ def test_1(self): 'nwpw': {'ewald_ncut': 8, 'simulation_cell': '\n ngrid 16 16 16\n end'} }, 'basis_set': {}, + 'basis_set_option': 'cartesian', 'charge': 0, 'operation': 'optimize', 'spin_multiplicity': None, @@ -70,8 +70,11 @@ def test_1(self): ]) s = StructureData(ase=a) - self.assertEquals(_prepare_pymatgen_dict(par,s), -'''set nwpw:minimizer 2 + ## Test 1 + # Input file string prodiced by pymatgen + app = _prepare_pymatgen_dict(par, s) + # Target input file + target_string = '''set nwpw:minimizer 2 set nwpw:psi_nolattice .true. set includestress .true. geometry units au center noautosym noautoz print @@ -87,30 +90,31 @@ def test_1(self): title "pspw optimize" charge 0 -basis +basis cartesian end driver - clear - maxiter 40 + clear \n maxiter 40 end nwpw ewald_ncut 8 - simulation_cell - ngrid 16 16 16 + simulation_cell \n ngrid 16 16 16 end end task pspw optimize -''') +''' + self.assertEquals(app, target_string) + ## Test 2 par['add_cell'] = True - self.assertEquals(_prepare_pymatgen_dict(par,s), -'''set nwpw:minimizer 2 + # Input file string prodiced by pymatgen + app = _prepare_pymatgen_dict(par, s) + # Target input file + target_string = '''set nwpw:minimizer 2 set nwpw:psi_nolattice .true. set includestress .true. -geometry units au center noautosym noautoz print - system crystal +geometry units au center noautosym noautoz print \n system crystal lat_a 8.277 lat_b 8.277 lat_c 8.277 @@ -130,18 +134,17 @@ def test_1(self): title "pspw optimize" charge 0 -basis +basis cartesian end driver - clear - maxiter 40 + clear \n maxiter 40 end nwpw ewald_ncut 8 - simulation_cell - ngrid 16 16 16 + simulation_cell \n ngrid 16 16 16 end end task pspw optimize -''') +''' + self.assertEquals(app, target_string) diff --git a/optional_requirements.txt b/optional_requirements.txt index 8a0de5ccac..4699327882 100644 --- a/optional_requirements.txt +++ b/optional_requirements.txt @@ -33,7 +33,7 @@ PyCifRW==3.6.2.1 ## Support for ssh transport with authentification through Kerberos token ## NOTE: you need to install first libffi (sudo apt-get install libffi-dev under Ubuntu) pyasn1>=0.1.9 -python-gssapi>=0.6.4 +#python-gssapi>=0.6.4 ## ICSD tools PyMySQL>=0.6.1 From 1d0fd1259427e0e1ca50e37f51321f4140647d90 Mon Sep 17 00:00:00 2001 From: Fernando Gargiulo Date: Fri, 25 Nov 2016 12:15:10 +0100 Subject: [PATCH 2/5] Added sqlachemy test folder. Minor changes in some comments --- .../djsite/db/subtests/dataclasses.py | 4 ++-- aiida/backends/sqlalchemy/tests/nwchem.py | 21 +++++++++++++++++++ 2 files changed, 23 insertions(+), 2 deletions(-) create mode 100644 aiida/backends/sqlalchemy/tests/nwchem.py diff --git a/aiida/backends/djsite/db/subtests/dataclasses.py b/aiida/backends/djsite/db/subtests/dataclasses.py index f71f2e203f..16a6bf821d 100644 --- a/aiida/backends/djsite/db/subtests/dataclasses.py +++ b/aiida/backends/djsite/db/subtests/dataclasses.py @@ -269,7 +269,7 @@ def test_get_aiida_structure(self): def test_ase_primitive_and_conventional_cells_ase(self): """ Checking the number of atoms per primitive/conventional cell - returned by ASE ase.io.cif.read_cif() method. Test input is + returned by ASE ase.io.read() method. 
Test input is adapted from http://www.crystallography.net/cod/9012064.cif@120115 """ import tempfile @@ -321,7 +321,7 @@ def test_ase_primitive_and_conventional_cells_ase(self): def test_ase_primitive_and_conventional_cells_pymatgen(self): """ Checking the number of atoms per primitive/conventional cell - returned by ASE ase.io.cif.read_cif() method. Test input is + returned by ASE ase.io.read() method. Test input is adapted from http://www.crystallography.net/cod/9012064.cif@120115 """ import tempfile diff --git a/aiida/backends/sqlalchemy/tests/nwchem.py b/aiida/backends/sqlalchemy/tests/nwchem.py new file mode 100644 index 0000000000..0c84c8034e --- /dev/null +++ b/aiida/backends/sqlalchemy/tests/nwchem.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- +""" +Tests for the NWChem input plugins. +""" + +__copyright__ = u"Copyright (c), This file is part of the AiiDA platform. For further information please visit http://www.aiida.net/. All rights reserved." +__license__ = "MIT license, see LICENSE.txt file." +__version__ = "0.7.0" +__authors__ = "The AiiDA team." + +#import the generic test class for nwchem +from aiida.backends.sqlalchemy.tests.testbase import SqlAlchemyTests +from aiida.backends.tests.nwchem import TestNwchem + + +class TestNwchemSqla(SqlAlchemyTests, TestNwchem): + """ + nwchem tests that do need to be specified for sqlalchemy backend + """ + pass + From bd187b5d03972a66ef453652a3c4919f5934bb2d Mon Sep 17 00:00:00 2001 From: Fernando Gargiulo Date: Fri, 25 Nov 2016 15:39:30 +0100 Subject: [PATCH 3/5] separated tests for tcodexporter. Waiting for export_tree to be implemented for sqlalchemy --- .../djsite/db/subtests/tcodexporter.py | 722 +----------------- aiida/backends/sqlalchemy/tests/nwchem.py | 3 +- .../backends/sqlalchemy/tests/tcodexporter.py | 22 + .../backends/sqlalchemy/tests/test_runner.py | 3 +- aiida/backends/tests/tcodexporter.py | 53 +- 5 files changed, 71 insertions(+), 732 deletions(-) create mode 100644 aiida/backends/sqlalchemy/tests/tcodexporter.py diff --git a/aiida/backends/djsite/db/subtests/tcodexporter.py b/aiida/backends/djsite/db/subtests/tcodexporter.py index 5c6a1b77fe..c05253d809 100644 --- a/aiida/backends/djsite/db/subtests/tcodexporter.py +++ b/aiida/backends/djsite/db/subtests/tcodexporter.py @@ -1,728 +1,22 @@ # -*- coding: utf-8 -*- """ -Tests for TestTcodDbExporter +Tests for the Tcod exporter. """ -from django.utils import unittest - -from aiida.backends.djsite.db.testbase import AiidaTestCase -from aiida.common.links import LinkType __copyright__ = u"Copyright (c), This file is part of the AiiDA platform. For further information please visit http://www.aiida.net/. All rights reserved." __license__ = "MIT license, see LICENSE.txt file." __version__ = "0.7.0" __authors__ = "The AiiDA team." - -class FakeObject(object): - """ - A wrapper for dictionary, which can be used instead of object. - Example use case: fake Calculation object ``calc``, having keys - ``inp`` and ``out`` to access also fake NodeInputManager and - NodeOutputManager. 
- """ - - def __init__(self, dictionary): - self._dictionary = dictionary - - def __getattr__(self, name): - if isinstance(self._dictionary[name], dict): - return FakeObject(self._dictionary[name]) - else: - return self._dictionary[name] +#import the generic test class for nwchem +from aiida.backends.djsite.db.testbase import AiidaTestCase +from aiida.backends.tests.tcodexporter import TestTcodDbExporter -class TestTcodDbExporter(AiidaTestCase): +class TestTcodDbExporterDjango(TestTcodDbExporter, AiidaTestCase): """ - Tests for TcodDbExporter class. + These tests check the features of nwchem input file generator that differ + from the base TcodDbExporter test """ - from aiida.orm.data.structure import has_ase, has_pyspglib - from aiida.orm.data.cif import has_pycifrw - - def test_contents_encoding(self): - """ - Testing the logic of choosing the encoding and the process of - encoding contents. - """ - from aiida.tools.dbexporters.tcod import cif_encode_contents - self.assertEquals(cif_encode_contents('simple line')[1], - None) - self.assertEquals(cif_encode_contents(' ;\n ;')[1], - None) - self.assertEquals(cif_encode_contents(';\n'), - ('=3B\n', 'quoted-printable')) - self.assertEquals(cif_encode_contents('line\n;line'), - ('line\n=3Bline', 'quoted-printable')) - self.assertEquals(cif_encode_contents('tabbed\ttext'), - ('tabbed=09text', 'quoted-printable')) - self.assertEquals(cif_encode_contents('angstrom Å'), - ('angstrom =C3=85', 'quoted-printable')) - self.assertEquals(cif_encode_contents('.'), - ('=2E', 'quoted-printable')) - self.assertEquals(cif_encode_contents('?'), - ('=3F', 'quoted-printable')) - self.assertEquals(cif_encode_contents('.?'), ('.?', None)) - # This one is particularly tricky: a long line is folded by the QP - # and the semicolon sign becomes the first character on a new line. - self.assertEquals(cif_encode_contents( - "Å{};a".format("".join("a" for i in range(0, 69)))), - ('=C3=85aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaa=\n=3Ba', - 'quoted-printable')) - self.assertEquals(cif_encode_contents('angstrom ÅÅÅ'), - ('YW5nc3Ryb20gw4XDhcOF', 'base64')) - self.assertEquals(cif_encode_contents( - "".join("a" for i in range(0, 2048)))[1], - None) - self.assertEquals(cif_encode_contents( - "".join("a" for i in range(0, 2049)))[1], - 'quoted-printable') - self.assertEquals(cif_encode_contents('datatest')[1], None) - self.assertEquals(cif_encode_contents('data_test')[1], 'base64') - - def test_collect_files(self): - """ - Testing the collection of files from file tree. 
- """ - from aiida.tools.dbexporters.tcod import _collect_files - from aiida.common.folders import SandboxFolder - import StringIO - - sf = SandboxFolder() - sf.get_subfolder('out', create=True) - sf.get_subfolder('pseudo', create=True) - sf.get_subfolder('save', create=True) - sf.get_subfolder('save/1', create=True) - sf.get_subfolder('save/2', create=True) - - f = StringIO.StringIO("test") - sf.create_file_from_filelike(f, 'aiida.in') - f = StringIO.StringIO("test") - sf.create_file_from_filelike(f, 'aiida.out') - f = StringIO.StringIO("test") - sf.create_file_from_filelike(f, '_aiidasubmit.sh') - f = StringIO.StringIO("test") - sf.create_file_from_filelike(f, '_.out') - f = StringIO.StringIO("test") - sf.create_file_from_filelike(f, 'out/out') - f = StringIO.StringIO("test") - sf.create_file_from_filelike(f, 'save/1/log.log') - - md5 = '098f6bcd4621d373cade4e832627b4f6' - sha1 = 'a94a8fe5ccb19ba61c4c0873d391e987982fbbd3' - self.assertEquals( - _collect_files(sf.abspath), - [{'name': '_.out', 'contents': 'test', 'md5': md5, - 'sha1': sha1, 'type': 'file'}, - {'name': '_aiidasubmit.sh', 'contents': 'test', 'md5': md5, - 'sha1': sha1, 'type': 'file'}, - {'name': 'aiida.in', 'contents': 'test', 'md5': md5, - 'sha1': sha1, 'type': 'file'}, - {'name': 'aiida.out', 'contents': 'test', 'md5': md5, - 'sha1': sha1, 'type': 'file'}, - {'name': 'out/', 'type': 'folder'}, - {'name': 'out/out', 'contents': 'test', 'md5': md5, - 'sha1': sha1, 'type': 'file'}, - {'name': 'pseudo/', 'type': 'folder'}, - {'name': 'save/', 'type': 'folder'}, - {'name': 'save/1/', 'type': 'folder'}, - {'name': 'save/1/log.log', 'contents': 'test', 'md5': md5, - 'sha1': sha1, 'type': 'file'}, - {'name': 'save/2/', 'type': 'folder'}]) - - @unittest.skipIf(not has_ase() or not has_pyspglib() or not has_pycifrw(), - "Unable to import ase or pyspglib") - def test_cif_structure_roundtrip(self): - from aiida.tools.dbexporters.tcod import export_cif, export_values - from aiida.orm import Code - from aiida.orm import JobCalculation - from aiida.orm.data.cif import CifData - from aiida.orm.data.parameter import ParameterData - from aiida.orm.data.upf import UpfData - from aiida.orm.data.folder import FolderData - from aiida.common.folders import SandboxFolder - from aiida.common.datastructures import calc_states - import tempfile - - with tempfile.NamedTemporaryFile() as f: - f.write(''' - data_test - _cell_length_a 10 - _cell_length_b 10 - _cell_length_c 10 - _cell_angle_alpha 90 - _cell_angle_beta 90 - _cell_angle_gamma 90 - loop_ - _atom_site_label - _atom_site_fract_x - _atom_site_fract_y - _atom_site_fract_z - C 0 0 0 - O 0.5 0.5 0.5 - ''') - f.flush() - a = CifData(file=f.name) - - c = a._get_aiida_structure() - c.store() - pd = ParameterData() - - code = Code(local_executable='test.sh') - with tempfile.NamedTemporaryFile() as f: - f.write("#/bin/bash\n\necho test run\n") - f.flush() - code.add_path(f.name, 'test.sh') - - code.store() - - calc = JobCalculation(computer=self.computer) - calc.set_resources({'num_machines': 1, - 'num_mpiprocs_per_machine': 1}) - calc.add_link_from(code, "code") - calc.set_environment_variables({'PATH': '/dev/null', 'USER': 'unknown'}) - - with tempfile.NamedTemporaryFile(prefix="Fe") as f: - f.write("\nelement=\"Fe\"\n") - f.flush() - upf = UpfData(file=f.name) - upf.store() - calc.add_link_from(upf, "upf") - - with tempfile.NamedTemporaryFile() as f: - f.write("data_test") - f.flush() - cif = CifData(file=f.name) - cif.store() - calc.add_link_from(cif, "cif") - - calc.store() - 
calc._set_state(calc_states.SUBMITTING) - with SandboxFolder() as f: - calc._store_raw_input_folder(f.abspath) - - fd = FolderData() - with open(fd._get_folder_pathsubfolder.get_abs_path( - calc._SCHED_OUTPUT_FILE), 'w') as f: - f.write("standard output") - f.flush() - - with open(fd._get_folder_pathsubfolder.get_abs_path( - calc._SCHED_ERROR_FILE), 'w') as f: - f.write("standard error") - f.flush() - - fd.store() - fd.add_link_from(calc, calc._get_linkname_retrieved(), LinkType.CREATE) - - pd.add_link_from(calc, "calc", LinkType.CREATE) - pd.store() - - with self.assertRaises(ValueError): - export_cif(c, parameters=pd) - - c.add_link_from(calc, "calc", LinkType.CREATE) - export_cif(c, parameters=pd) - - values = export_values(c, parameters=pd) - values = values['0'] - - self.assertEquals(values['_tcod_computation_environment'], - ['PATH=/dev/null\nUSER=unknown']) - self.assertEquals(values['_tcod_computation_command'], - ['cd 0; ./_aiidasubmit.sh']) - - def test_pw_translation(self): - from aiida.tools.dbexporters.tcod \ - import translate_calculation_specific_values - from aiida.tools.dbexporters.tcod_plugins.pw \ - import PwTcodtranslator as PWT - from aiida.tools.dbexporters.tcod_plugins.cp \ - import CpTcodtranslator as CPT - from aiida.orm.code import Code - from aiida.orm.data.array import ArrayData - from aiida.orm.data.array.kpoints import KpointsData - from aiida.orm.data.parameter import ParameterData - import numpy - - code = Code() - code._set_attr('remote_exec_path', '/test') - - kpoints = KpointsData() - kpoints.set_kpoints_mesh([2, 3, 4], offset=[0.25, 0.5, 0.75]) - - def empty_list(): - return [] - - calc = FakeObject({ - "inp": {"parameters": ParameterData(dict={}), - "kpoints": kpoints, "code": code}, - "out": {"output_parameters": ParameterData(dict={})}, - "get_inputs": empty_list - }) - - res = translate_calculation_specific_values(calc, PWT) - self.assertEquals(res, { - '_dft_BZ_integration_grid_X': 2, - '_dft_BZ_integration_grid_Y': 3, - '_dft_BZ_integration_grid_Z': 4, - '_dft_BZ_integration_grid_shift_X': 0.25, - '_dft_BZ_integration_grid_shift_Y': 0.5, - '_dft_BZ_integration_grid_shift_Z': 0.75, - '_dft_pseudopotential_atom_type': [], - '_dft_pseudopotential_type': [], - '_dft_pseudopotential_type_other_name': [], - '_tcod_software_package': 'Quantum ESPRESSO', - '_tcod_software_executable_path': '/test', - }) - - calc = FakeObject({ - "inp": {"parameters": ParameterData(dict={ - 'SYSTEM': {'ecutwfc': 40, 'occupations': 'smearing'} - })}, - "out": {"output_parameters": ParameterData(dict={ - 'number_of_electrons': 10, - })}, - "get_inputs": empty_list - }) - res = translate_calculation_specific_values(calc, PWT) - self.assertEquals(res, { - '_dft_cell_valence_electrons': 10, - '_tcod_software_package': 'Quantum ESPRESSO', - '_dft_BZ_integration_smearing_method': 'Gaussian', - '_dft_pseudopotential_atom_type': [], - '_dft_pseudopotential_type': [], - '_dft_pseudopotential_type_other_name': [], - '_dft_kinetic_energy_cutoff_EEX': 2176.910676048, - '_dft_kinetic_energy_cutoff_charge_density': 2176.910676048, - '_dft_kinetic_energy_cutoff_wavefunctions': 544.227669012, - }) - - calc = FakeObject({ - "inp": {"parameters": ParameterData(dict={})}, - "out": {"output_parameters": ParameterData(dict={ - 'energy_xc': 5, - })}, - "get_inputs": empty_list - }) - with self.assertRaises(ValueError): - translate_calculation_specific_values(calc, PWT) - - calc = FakeObject({ - "inp": {"parameters": ParameterData(dict={})}, - "out": {"output_parameters": ParameterData(dict={ - 
'energy_xc': 5, - 'energy_xc_units': 'meV' - })}, - "get_inputs": empty_list - }) - with self.assertRaises(ValueError): - translate_calculation_specific_values(calc, PWT) - - energies = { - 'energy': -3701.7004199449257, - 'energy_one_electron': -984.0078459766, - 'energy_xc': -706.6986753641559, - 'energy_ewald': -2822.6335103043157, - 'energy_hartree': 811.6396117001462, - 'fermi_energy': 10.25208617898623, - } - dct = energies - for key in energies.keys(): - dct["{}_units".format(key)] = 'eV' - calc = FakeObject({ - "inp": {"parameters": ParameterData(dict={ - 'SYSTEM': {'smearing': 'mp'} - })}, - "out": {"output_parameters": ParameterData(dict=dct)}, - "get_inputs": empty_list - }) - res = translate_calculation_specific_values(calc, PWT) - self.assertEquals(res, { - '_tcod_total_energy': energies['energy'], - '_dft_1e_energy': energies['energy_one_electron'], - '_dft_correlation_energy': energies['energy_xc'], - '_dft_ewald_energy': energies['energy_ewald'], - '_dft_hartree_energy': energies['energy_hartree'], - '_dft_fermi_energy': energies['fermi_energy'], - '_tcod_software_package': 'Quantum ESPRESSO', - '_dft_BZ_integration_smearing_method': 'Methfessel-Paxton', - '_dft_BZ_integration_MP_order': 1, - '_dft_pseudopotential_atom_type': [], - '_dft_pseudopotential_type': [], - '_dft_pseudopotential_type_other_name': [], - }) - dct = energies - dct['number_of_electrons'] = 10 - for key in energies.keys(): - dct["{}_units".format(key)] = 'eV' - calc = FakeObject({ - "inp": {"parameters": ParameterData(dict={ - 'SYSTEM': {'smearing': 'unknown-method'} - })}, - "out": {"output_parameters": ParameterData(dict=dct)}, - "get_inputs": empty_list - }) - res = translate_calculation_specific_values(calc, CPT) - self.assertEquals(res, {'_dft_cell_valence_electrons': 10, - '_tcod_software_package': - 'Quantum ESPRESSO'}) - - ad = ArrayData() - ad.set_array("forces", numpy.array([[[1, 2, 3], [4, 5, 6]]])) - calc = FakeObject({ - "inp": {"parameters": ParameterData(dict={ - 'SYSTEM': {'smearing': 'unknown-method'} - })}, - "out": {"output_parameters": ParameterData(dict={}), - "output_array": ad}, - "get_inputs": empty_list - }) - res = translate_calculation_specific_values(calc, PWT) - self.assertEquals(res, { - '_tcod_software_package': 'Quantum ESPRESSO', - '_dft_BZ_integration_smearing_method': 'other', - '_dft_BZ_integration_smearing_method_other': 'unknown-method', - '_dft_pseudopotential_atom_type': [], - '_dft_pseudopotential_type': [], - '_dft_pseudopotential_type_other_name': [], - ## Residual forces are no longer produced, as they should - ## be in the same CIF loop with coordinates -- to be - ## implemented later, since it's not yet clear how. 
- # '_tcod_atom_site_resid_force_Cartn_x': [1,4], - # '_tcod_atom_site_resid_force_Cartn_y': [2,5], - # '_tcod_atom_site_resid_force_Cartn_z': [3,6], - }) - - def test_nwcpymatgen_translation(self): - from aiida.tools.dbexporters.tcod \ - import translate_calculation_specific_values - from aiida.tools.dbexporters.tcod_plugins.nwcpymatgen \ - import NwcpymatgenTcodtranslator as NPT - from aiida.orm.data.parameter import ParameterData - from tcodexporter import FakeObject - - calc = FakeObject({ - "out": {"output": - ParameterData(dict={ - "basis_set": { - "H": { - "description": "6-31g", - "functions": "2", - "shells": "2", - "types": "2s" - }, - "O": { - "description": "6-31g", - "functions": "9", - "shells": "5", - "types": "3s2p" - } - }, - "corrections": {}, - "energies": [ - -2057.99011937535 - ], - "errors": [], - "frequencies": None, - "has_error": False, - "job_type": "NWChem SCF Module" - }), - "job_info": ParameterData(dict={ - "0 permanent": ".", - "0 scratch": ".", - "argument 1": "aiida.in", - "compiled": "Sun_Dec_22_04:02:59_2013", - "data base": "./aiida.db", - "date": "Mon May 11 17:10:07 2015", - "ga revision": "10379", - "global": "200.0 Mbytes (distinct from heap & stack)", - "hardfail": "no", - "heap": "100.0 Mbytes", - "hostname": "theospc11", - "input": "aiida.in", - "nproc": "6", - "nwchem branch": "6.3", - "nwchem revision": "24277", - "prefix": "aiida.", - "program": "/usr/bin/nwchem", - "source": "/build/buildd/nwchem-6.3+r1", - "stack": "100.0 Mbytes", - "status": "startup", - "time left": "-1s", - "total": "400.0 Mbytes", - "verify": "yes", - }) - }}) - res = translate_calculation_specific_values(calc, NPT) - self.assertEquals(res, { - '_tcod_software_package': 'NWChem', - '_tcod_software_package_version': '6.3', - '_tcod_software_package_compilation_date': '2013-12-22T04:02:59', - '_atom_type_symbol': ['H', 'O'], - '_dft_atom_basisset': ['6-31g', '6-31g'], - '_dft_atom_type_valence_configuration': ['2s', '3s2p'], - }) - - @unittest.skipIf(not has_ase() or not has_pycifrw() or not has_pyspglib(), - "Unable to import ase, pycifrw or pyspglib") - def test_inline_export(self): - from aiida.orm.data.cif import CifData - from aiida.tools.dbexporters.tcod import export_values - import tempfile - - with tempfile.NamedTemporaryFile() as f: - f.write(''' - data_test - _cell_length_a 10 - _cell_length_b 10 - _cell_length_c 10 - _cell_angle_alpha 90 - _cell_angle_beta 90 - _cell_angle_gamma 90 - loop_ - _atom_site_label - _atom_site_fract_x - _atom_site_fract_y - _atom_site_fract_z - C 0 0 0 - O 0.5 0.5 0.5 - ''') - f.flush() - a = CifData(file=f.name) - - s = a._get_aiida_structure(store=True) - val = export_values(s) - script = val.first_block()['_tcod_file_contents'][1] - function = '_get_aiida_structure_ase_inline' - self.assertNotEqual(script.find(function), script.rfind(function)) - - @unittest.skipIf(not has_ase() or not has_pyspglib() or not has_pycifrw(), - "Unable to import ase or pyspglib") - def test_symmetry_reduction(self): - from aiida.orm.data.structure import StructureData - from aiida.tools.dbexporters.tcod import export_values - from ase import Atoms - - a = Atoms('BaTiO3', cell=(4., 4., 4.)) - a.set_scaled_positions( - ((0.0, 0.0, 0.0), - (0.5, 0.5, 0.5), - (0.5, 0.5, 0.0), - (0.5, 0.0, 0.5), - (0.0, 0.5, 0.5), - ) - ) - - a.set_chemical_symbols(['Ba', 'Ti', 'O', 'O', 'O']) - val = export_values(StructureData(ase=a), reduce_symmetry=True, store=True)['0'] - self.assertEqual(val['_atom_site_label'], ['Ba1', 'Ti1', 'O1']) - 
self.assertEqual(val['_symmetry_space_group_name_H-M'], 'Pm-3m') - self.assertEqual(val['_symmetry_space_group_name_Hall'], '-P 4 2 3') - - def test_cmdline_parameters(self): - """ - Ensuring that neither extend_with_cmdline_parameters() nor - deposition_cmdline_parameters() set default parameters. - """ - from aiida.tools.dbexporters.tcod \ - import extend_with_cmdline_parameters, \ - deposition_cmdline_parameters - import argparse - - parser = argparse.ArgumentParser() - extend_with_cmdline_parameters(parser) - options = vars(parser.parse_args(args=[])) - - for key in options.keys(): - if options[key] is None: - options.pop(key) - - self.assertEqual(options, {}) - - parser = argparse.ArgumentParser() - deposition_cmdline_parameters(parser) - options = vars(parser.parse_args(args=[])) - - for key in options.keys(): - if options[key] is None: - options.pop(key) - - self.assertEqual(options, {}) - - @unittest.skipIf(not has_ase() or not has_pycifrw() or not has_pyspglib(), - "Unable to import ase, pycifrw or pyspglib") - def test_export_trajectory(self): - from aiida.orm.data.structure import StructureData - from aiida.orm.data.array.trajectory import TrajectoryData - from aiida.tools.dbexporters.tcod import export_values - - cells = [ - [[2., 0., 0., ], - [0., 2., 0., ], - [0., 0., 2., ]], - [[3., 0., 0., ], - [0., 3., 0., ], - [0., 0., 3., ]] - ] - symbols = [['H', 'O', 'C'], ['H', 'O', 'C']] - positions = [ - [[0., 0., 0.], - [0.5, 0.5, 0.5], - [1.5, 1.5, 1.5]], - [[0., 0., 0.], - [0.75, 0.75, 0.75], - [1.25, 1.25, 1.25]] - ] - structurelist = [] - for i in range(0, 2): - struct = StructureData(cell=cells[i]) - for j, symbol in enumerate(symbols[i]): - struct.append_atom(symbols=symbol, position=positions[i][j]) - structurelist.append(struct) - - td = TrajectoryData(structurelist=structurelist) - - with self.assertRaises(ValueError): - # Trajectory index is not specified - v = export_values(td) - - expected_tags = [ - '_atom_site_fract_x', - '_atom_site_fract_y', - '_atom_site_fract_z', - '_atom_site_label', - '_atom_site_type_symbol', - '_audit_conform_dict_location', - '_audit_conform_dict_name', - '_audit_conform_dict_version', - '_audit_creation_method', - '_cell_angle_alpha', - '_cell_angle_beta', - '_cell_angle_gamma', - '_cell_length_a', - '_cell_length_b', - '_cell_length_c', - '_chemical_formula_sum', - '_symmetry_Int_Tables_number', - '_symmetry_equiv_pos_as_xyz', - '_symmetry_space_group_name_H-M', - '_symmetry_space_group_name_Hall' - ] - - tcod_file_tags = [ - '_tcod_content_encoding_id', - '_tcod_content_encoding_layer_id', - '_tcod_content_encoding_layer_type', - '_tcod_file_URI', - '_tcod_file_content_encoding', - '_tcod_file_contents', - '_tcod_file_id', - '_tcod_file_md5sum', - '_tcod_file_name', - '_tcod_file_role', - '_tcod_file_sha1sum' - ] - - # Not stored and not to be stored: - v = export_values(td, trajectory_index=1) - self.assertEqual(sorted(v['0'].keys()), expected_tags) - - # Stored, but not expected to be stored: - td = TrajectoryData(structurelist=structurelist) - td.store() - v = export_values(td, trajectory_index=1) - self.assertEqual(sorted(v['0'].keys()), - expected_tags + tcod_file_tags) - - # Not stored, but expected to be stored: - td = TrajectoryData(structurelist=structurelist) - v = export_values(td, trajectory_index=1, store=True) - self.assertEqual(sorted(v['0'].keys()), - expected_tags + tcod_file_tags) - - # Both stored and expected to be stored: - td = TrajectoryData(structurelist=structurelist) - td.store() - v = export_values(td, 
trajectory_index=1, store=True) - self.assertEqual(sorted(v['0'].keys()), - expected_tags + tcod_file_tags) - - # Stored, but asked not to include DB dump: - td = TrajectoryData(structurelist=structurelist) - td.store() - v = export_values(td, trajectory_index=1, - dump_aiida_database=False) - self.assertEqual(sorted(v['0'].keys()), - expected_tags) - - def test_contents_encoding(self): - """ - Testing the logic of choosing the encoding and the process of - encoding contents. - """ - from aiida.tools.dbexporters.tcod import decode_textfield - - def test_ncr(self, inp, out): - from aiida.tools.dbexporters.tcod import (encode_textfield_ncr, - decode_textfield_ncr) - encoded = encode_textfield_ncr(inp) - decoded = decode_textfield_ncr(out) - decoded_universal = decode_textfield(out, 'ncr') - self.assertEquals(encoded, out) - self.assertEquals(decoded, inp) - self.assertEquals(decoded_universal, inp) - - def test_quoted_printable(self, inp, out): - from aiida.tools.dbexporters.tcod import (encode_textfield_quoted_printable, - decode_textfield_quoted_printable) - encoded = encode_textfield_quoted_printable(inp) - decoded = decode_textfield_quoted_printable(out) - decoded_universal = decode_textfield(out, 'quoted-printable') - self.assertEquals(encoded, out) - self.assertEquals(decoded, inp) - self.assertEquals(decoded_universal, inp) - - def test_base64(self, inp, out): - from aiida.tools.dbexporters.tcod import (encode_textfield_base64, - decode_textfield_base64) - encoded = encode_textfield_base64(inp) - decoded = decode_textfield_base64(out) - decoded_universal = decode_textfield(out, 'base64') - self.assertEquals(encoded, out) - self.assertEquals(decoded, inp) - self.assertEquals(decoded_universal, inp) - - def test_gzip_base64(self, text): - from aiida.tools.dbexporters.tcod import (encode_textfield_gzip_base64, - decode_textfield_gzip_base64) - encoded = encode_textfield_gzip_base64(text) - decoded = decode_textfield_gzip_base64(encoded) - decoded_universal = decode_textfield(encoded, 'gzip+base64') - self.assertEquals(text, decoded) - self.assertEquals(text, decoded_universal) - - test_ncr(self, '.', '.') - test_ncr(self, '?', '?') - test_ncr(self, ';\n', ';\n') - test_ncr(self, 'line\n;line', 'line\n;line') - test_ncr(self, 'tabbed\ttext', 'tabbed text') - test_ncr(self, 'angstrom Å', 'angstrom Ã…') - test_ncr(self, 'Ã…', - 'Ã…') - - test_quoted_printable(self, '.', '=2E') - test_quoted_printable(self, '?', '=3F') - test_quoted_printable(self, ';\n', '=3B\n') - test_quoted_printable(self, 'line\n;line', 'line\n=3Bline') - test_quoted_printable(self, 'tabbed\ttext', 'tabbed=09text') - test_quoted_printable(self, 'angstrom Å', 'angstrom =C3=85') - test_quoted_printable(self, 'line\rline\x00', 'line=0Dline=00') - # This one is particularly tricky: a long line is folded by the QP - # and the semicolon sign becomes the first character on a new line. 
- test_quoted_printable(self, - "Å{};a".format("".join("a" for i in range(0, 69))), - '=C3=85aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' - 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaa=\n=3Ba') + pass - test_base64(self, 'angstrom ÅÅÅ', 'YW5nc3Ryb20gw4XDhcOF') - test_gzip_base64(self, 'angstrom ÅÅÅ') diff --git a/aiida/backends/sqlalchemy/tests/nwchem.py b/aiida/backends/sqlalchemy/tests/nwchem.py index 0c84c8034e..b070ba9c50 100644 --- a/aiida/backends/sqlalchemy/tests/nwchem.py +++ b/aiida/backends/sqlalchemy/tests/nwchem.py @@ -17,5 +17,4 @@ class TestNwchemSqla(SqlAlchemyTests, TestNwchem): """ nwchem tests that do need to be specified for sqlalchemy backend """ - pass - + pass \ No newline at end of file diff --git a/aiida/backends/sqlalchemy/tests/tcodexporter.py b/aiida/backends/sqlalchemy/tests/tcodexporter.py new file mode 100644 index 0000000000..74906f8c18 --- /dev/null +++ b/aiida/backends/sqlalchemy/tests/tcodexporter.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +""" +Tests for the Tcod exporter +""" + +__copyright__ = u"Copyright (c), This file is part of the AiiDA platform. For further information please visit http://www.aiida.net/. All rights reserved." +__license__ = "MIT license, see LICENSE.txt file." +__version__ = "0.7.0" +__authors__ = "The AiiDA team." + +#import the generic test class for nwchem +from aiida.backends.sqlalchemy.tests.testbase import SqlAlchemyTests +from aiida.backends.tests.tcodexporter import TestTcodDbExporter + + +class TestTcodDbExporterSqla(SqlAlchemyTests, TestTcodDbExporter): + """ + tcod database exporter tests that do need to be specified for sqlalchemy + backend + """ + pass + diff --git a/aiida/backends/sqlalchemy/tests/test_runner.py b/aiida/backends/sqlalchemy/tests/test_runner.py index ad08be0e64..8cd227c2b3 100644 --- a/aiida/backends/sqlalchemy/tests/test_runner.py +++ b/aiida/backends/sqlalchemy/tests/test_runner.py @@ -23,7 +23,8 @@ def run_tests(): modules_str = [ # "aiida.backends.sqlalchemy.tests.nodes", # "aiida.backends.sqlalchemy.tests.backup_script", - "aiida.backends.sqlalchemy.tests.nwchem" +# "aiida.backends.sqlalchemy.tests.nwchem", + "aiida.backends.sqlalchemy.tests.tcodexporter", ] for module_str in modules_str: # Dynamically importing the module that interests us diff --git a/aiida/backends/tests/tcodexporter.py b/aiida/backends/tests/tcodexporter.py index 22bb550196..0b2f0f8aff 100644 --- a/aiida/backends/tests/tcodexporter.py +++ b/aiida/backends/tests/tcodexporter.py @@ -2,7 +2,7 @@ """ Tests for TestTcodDbExporter """ -from django.utils import unittest +import unittest from aiida.backends.djsite.db.testbase import AiidaTestCase from aiida.common.links import LinkType @@ -250,19 +250,27 @@ def test_pw_translation(self): kpoints = KpointsData() kpoints.set_kpoints_mesh([2, 3, 4], offset=[0.25, 0.5, 0.75]) + def empty_list(): + return [] + calc = FakeObject({ "inp": {"parameters": ParameterData(dict={}), "kpoints": kpoints, "code": code}, - "out": {"output_parameters": ParameterData(dict={})} + "out": {"output_parameters": ParameterData(dict={})}, + "get_inputs": empty_list }) + res = translate_calculation_specific_values(calc, PWT) self.assertEquals(res, { - '_integration_grid_X': 2, - '_integration_grid_Y': 3, - '_integration_grid_Z': 4, - '_integration_grid_shift_X': 0.25, - '_integration_grid_shift_Y': 0.5, - '_integration_grid_shift_Z': 0.75, + '_dft_BZ_integration_grid_X': 2, + '_dft_BZ_integration_grid_Y': 3, + '_dft_BZ_integration_grid_Z': 4, + '_dft_BZ_integration_grid_shift_X': 0.25, + '_dft_BZ_integration_grid_shift_Y': 
0.5, + '_dft_BZ_integration_grid_shift_Z': 0.75, + '_dft_pseudopotential_atom_type': [], + '_dft_pseudopotential_type': [], + '_dft_pseudopotential_type_other_name': [], '_tcod_software_package': 'Quantum ESPRESSO', '_tcod_software_executable_path': '/test', }) @@ -273,13 +281,17 @@ def test_pw_translation(self): })}, "out": {"output_parameters": ParameterData(dict={ 'number_of_electrons': 10, - })} + })}, + "get_inputs": empty_list }) res = translate_calculation_specific_values(calc, PWT) self.assertEquals(res, { '_dft_cell_valence_electrons': 10, '_tcod_software_package': 'Quantum ESPRESSO', '_dft_BZ_integration_smearing_method': 'Gaussian', + '_dft_pseudopotential_atom_type': [], + '_dft_pseudopotential_type': [], + '_dft_pseudopotential_type_other_name': [], '_dft_kinetic_energy_cutoff_EEX': 2176.910676048, '_dft_kinetic_energy_cutoff_charge_density': 2176.910676048, '_dft_kinetic_energy_cutoff_wavefunctions': 544.227669012, @@ -289,7 +301,8 @@ def test_pw_translation(self): "inp": {"parameters": ParameterData(dict={})}, "out": {"output_parameters": ParameterData(dict={ 'energy_xc': 5, - })} + })}, + "get_inputs": empty_list }) with self.assertRaises(ValueError): translate_calculation_specific_values(calc, PWT) @@ -299,7 +312,8 @@ def test_pw_translation(self): "out": {"output_parameters": ParameterData(dict={ 'energy_xc': 5, 'energy_xc_units': 'meV' - })} + })}, + "get_inputs": empty_list }) with self.assertRaises(ValueError): translate_calculation_specific_values(calc, PWT) @@ -319,7 +333,8 @@ def test_pw_translation(self): "inp": {"parameters": ParameterData(dict={ 'SYSTEM': {'smearing': 'mp'} })}, - "out": {"output_parameters": ParameterData(dict=dct)} + "out": {"output_parameters": ParameterData(dict=dct)}, + "get_inputs": empty_list }) res = translate_calculation_specific_values(calc, PWT) self.assertEquals(res, { @@ -332,6 +347,9 @@ def test_pw_translation(self): '_tcod_software_package': 'Quantum ESPRESSO', '_dft_BZ_integration_smearing_method': 'Methfessel-Paxton', '_dft_BZ_integration_MP_order': 1, + '_dft_pseudopotential_atom_type': [], + '_dft_pseudopotential_type': [], + '_dft_pseudopotential_type_other_name': [], }) dct = energies dct['number_of_electrons'] = 10 @@ -341,7 +359,8 @@ def test_pw_translation(self): "inp": {"parameters": ParameterData(dict={ 'SYSTEM': {'smearing': 'unknown-method'} })}, - "out": {"output_parameters": ParameterData(dict=dct)} + "out": {"output_parameters": ParameterData(dict=dct)}, + "get_inputs": empty_list }) res = translate_calculation_specific_values(calc, CPT) self.assertEquals(res, {'_dft_cell_valence_electrons': 10, @@ -355,13 +374,17 @@ def test_pw_translation(self): 'SYSTEM': {'smearing': 'unknown-method'} })}, "out": {"output_parameters": ParameterData(dict={}), - "output_array": ad} + "output_array": ad}, + "get_inputs": empty_list }) res = translate_calculation_specific_values(calc, PWT) self.assertEquals(res, { '_tcod_software_package': 'Quantum ESPRESSO', '_dft_BZ_integration_smearing_method': 'other', '_dft_BZ_integration_smearing_method_other': 'unknown-method', + '_dft_pseudopotential_atom_type': [], + '_dft_pseudopotential_type': [], + '_dft_pseudopotential_type_other_name': [], ## Residual forces are no longer produced, as they should ## be in the same CIF loop with coordinates -- to be ## implemented later, since it's not yet clear how. 
@@ -693,7 +716,7 @@ def test_gzip_base64(self, text): test_quoted_printable(self, 'line\n;line', 'line\n=3Bline') test_quoted_printable(self, 'tabbed\ttext', 'tabbed=09text') test_quoted_printable(self, 'angstrom Å', 'angstrom =C3=85') - test_quoted_printable(self, 'line\rline\x00', 'line\rline=00') + test_quoted_printable(self, 'line\rline\x00', 'line=0Dline=00') # This one is particularly tricky: a long line is folded by the QP # and the semicolon sign becomes the first character on a new line. test_quoted_printable(self, From 4e83770edbb6dc8b5ce89069b10f7051c00f3b49 Mon Sep 17 00:00:00 2001 From: Fernando Gargiulo Date: Fri, 2 Dec 2016 09:43:16 +0100 Subject: [PATCH 4/5] Added scaffolding for more tests --- .../djsite/db/subtests/quantumespressopw.py | 248 +----------------- .../db/subtests/quantumespressopwimmigrant.py | 233 ++-------------- .../sqlalchemy/tests/quantumespressopw.py | 38 +++ .../tests/quantumespressopwimmigrant.py | 63 +++++ .../backends/sqlalchemy/tests/test_runner.py | 4 +- aiida/backends/sqlalchemy/tests/testbase.py | 4 + aiida/backends/tests/quantumespressopw.py | 56 ++-- .../tests/quantumespressopwimmigrant.py | 33 +-- aiida/orm/implementation/sqlalchemy/node.py | 3 +- 9 files changed, 163 insertions(+), 519 deletions(-) create mode 100644 aiida/backends/sqlalchemy/tests/quantumespressopw.py create mode 100644 aiida/backends/sqlalchemy/tests/quantumespressopwimmigrant.py diff --git a/aiida/backends/djsite/db/subtests/quantumespressopw.py b/aiida/backends/djsite/db/subtests/quantumespressopw.py index 81934f4400..cc689a814f 100644 --- a/aiida/backends/djsite/db/subtests/quantumespressopw.py +++ b/aiida/backends/djsite/db/subtests/quantumespressopw.py @@ -1,36 +1,19 @@ # -*- coding: utf-8 -*- """ -Tests for the pw input plugin. - -TODO: to test: -- association species->pseudos -- two pseudos with the same filename -- IFPOS (FIXED_COORDS in SETTINGS) -- automatic namelists -- manually specified namelists -- empty namelists -- content for non-existent namelists specified +Tests for the pw input plugin specific to django """ -import os from aiida.backends.djsite.db.testbase import AiidaTestCase -from aiida.orm import CalculationFactory, DataFactory +from aiida.backends.tests.quantumespressopw import TestQEPWInputGeneration from aiida.orm.code import Code -from aiida.common.folders import SandboxFolder -from aiida.common.exceptions import InputValidationError -import aiida -__copyright__ = u"Copyright (c), This file is part of the AiiDA platform. For further information please visit http://www.aiida.net/. All rights reserved." +__copyright__ = u"Copyright (c), This file is part of the AiiDA platform. For " \ + u"further information please visit http://www.aiida.net/. All " \ + u"rights reserved." __license__ = "MIT license, see LICENSE.txt file." __version__ = "0.7.0" __authors__ = "The AiiDA team." 
-QECalc = CalculationFactory('quantumespresso.pw') -StructureData = DataFactory('structure') -ParameterData = DataFactory('parameter') -UpfData = DataFactory('upf') -KpointsData = DataFactory('array.kpoints') - class QETestCase(AiidaTestCase): @classmethod @@ -40,225 +23,14 @@ def setUpClass(cls): 'computer': cls.computer, 'resources': { 'num_machines': 1, - 'num_mpiprocs_per_machine': 1} + 'num_mpiprocs_per_machine': 1 + } } cls.code = Code(remote_computer_exec=(cls.computer, '/x.x')).store() - -class TestQEPWInputGeneration(QETestCase): +class TestQEPWInputGenerationDjango(QETestCase, TestQEPWInputGeneration): """ - Test if the input is correctly generated + tests that are specific to Django """ - - def test_inputs(self): - import logging - - cell = ((2., 0., 0.), (0., 2., 0.), (0., 0., 2.)) - - input_params = { - 'CONTROL': { - 'calculation': 'vc-relax', - 'restart_mode': 'from_scratch', - 'wf_collect': True, - }, - 'SYSTEM': { - 'ecutwfc': 47., - 'ecutrho': 568., - }, - 'ELECTRONS': { - 'conv_thr': 1.e-10, - }, - } - - c = QECalc(**self.calc_params).store() - s = StructureData(cell=cell) - s.append_atom(position=(0., 0., 0.), symbols=['Ba']) - s.store() - - p = ParameterData(dict=input_params).store() - - k = KpointsData() - k.set_kpoints_mesh([4, 4, 4]) - k.store() - - pseudo_dir = os.path.join(os.path.split(aiida.__file__)[0], - os.pardir, 'examples', - 'testdata', 'qepseudos') - - raw_pseudos = [ - ("Ba.pbesol-spn-rrkjus_psl.0.2.3-tot-pslib030.UPF", 'Ba'), - ("Ti.pbesol-spn-rrkjus_psl.0.2.3-tot-pslib030.UPF", 'Ti'), - ("O.pbesol-n-rrkjus_psl.0.1-tested-pslib030.UPF", 'O'), - ] - - pseudos = {} - # suppress debug messages - logging.disable(logging.ERROR) - for fname, elem in raw_pseudos: - absname = os.path.realpath(os.path.join(pseudo_dir, fname)) - pseudo, _ = UpfData.get_or_create( - absname, use_first=True) - pseudos[elem] = pseudo - # Reset logging level - logging.disable(logging.NOTSET) - - inputdict = c.get_inputs_dict() - inputdict.pop('code',None) - - with SandboxFolder() as f: - # I use the same SandboxFolder more than once because nothing - # should be written for these failing tests - - # Missing required input nodes - with self.assertRaises(InputValidationError): - c._prepare_for_submission(f, inputdict) - c.use_parameters(p) - inputdict = c.get_inputs_dict() - with self.assertRaises(InputValidationError): - c._prepare_for_submission(f, inputdict) - c.use_structure(s) - inputdict = c.get_inputs_dict() - with self.assertRaises(InputValidationError): - c._prepare_for_submission(f, inputdict) - c.use_kpoints(k) - inputdict = c.get_inputs_dict() - with self.assertRaises(InputValidationError): - c._prepare_for_submission(f, inputdict) - c.use_pseudo(pseudos['Ba'], 'Ba') - - inputdict = c.get_inputs_dict() - with self.assertRaises(InputValidationError): - c._prepare_for_submission(f, inputdict) - c.use_code(self.code) - inputdict = c.get_inputs_dict() - c._prepare_for_submission(f, inputdict) - - # TODO: split this test in more than one - c.use_pseudo(pseudos['Ti'], 'Ti') - inputdict = c.get_inputs_dict() - # Too many pseudos - with self.assertRaises(InputValidationError): - c._prepare_for_submission(f, inputdict) - - def test_inputs_with_multiple_species(self): - """ - Test the creation of the input file when there are two species - associated to the same element, with different starting_magnetization - values. 
- """ - import logging - - s = StructureData(cell=[ - [2.871, 0., 0.], - [0., 2.871, 0.], - [0., 0., 2.871]]) - - ## I leave this as a reference, but I use instead the - ## append_atom method - # from aiida.orm.data.structure import Kind, Site - #s.append_kind(Kind(symbols='Ba', name='Ba1')) - #s.append_kind(Kind(symbols='Ba', name='Ba2')) - #s.append_site(Site(kind_name='Ba1', position=[0.,0.,0.])) - #s.append_site(Site(kind_name='Ba2', position=[1.4355,1.4355,1.4355])) - s.append_atom(symbols='Ba', position=[0., 0., 0.], name='Ba1') - s.append_atom(symbols='Ba', position=[1.4355, 1.4355, 1.4355], name='Ba2') - - input_params = { - 'CONTROL': { - 'calculation': 'vc-relax', - 'restart_mode': 'from_scratch', - 'wf_collect': True, - }, - 'SYSTEM': { - 'ecutwfc': 47., - 'ecutrho': 568., - 'nspin': 2, - 'starting_magnetization': {'Ba1': 0.5, - 'Ba2': -0.5}, - }, - 'ELECTRONS': { - 'conv_thr': 1.e-10, - }, - } - - c = QECalc(**self.calc_params).store() - c.use_code(self.code) - - p = ParameterData(dict=input_params).store() - - k = KpointsData() - k.set_kpoints_mesh([4, 4, 4]) - k.store() - - pseudo_dir = os.path.join(os.path.split(aiida.__file__)[0], - os.pardir, 'examples', - 'testdata', 'qepseudos') - - raw_pseudos = [ - ("Ba.pbesol-spn-rrkjus_psl.0.2.3-tot-pslib030.UPF", 'Ba'), - ] - - pseudos = {} - # suppress debug messages - logging.disable(logging.ERROR) - for fname, elem in raw_pseudos: - absname = os.path.realpath(os.path.join(pseudo_dir, fname)) - pseudo, _ = UpfData.get_or_create( - absname, use_first=True) - pseudos[elem] = pseudo - # Reset logging level - logging.disable(logging.NOTSET) - - c.use_parameters(p) - c.use_structure(s) - c.use_kpoints(k) - - with SandboxFolder() as f: - # I use the same SandboxFolder more than once because nothing - # should be written for these failing tests - - # Same pseudo for two species - c.use_pseudo(pseudos['Ba'], ['Ba1', 'Ba2']) - inputdict = c.get_inputs_dict() - c._prepare_for_submission(f, inputdict) - - with open(os.path.join(f.abspath, 'aiida.in')) as infile: - lines = [_.strip() for _ in infile.readlines()] - - find_kind_ba1 = any('Ba1' in l and - 'Ba.pbesol-spn-rrkjus_psl.0.2.3-tot-pslib030.UPF' in l - for l in lines) - self.assertTrue(find_kind_ba1, "Unable to find the species line " - "for Ba1") - find_kind_ba2 = any('Ba2' in l and - 'Ba.pbesol-spn-rrkjus_psl.0.2.3-tot-pslib030.UPF' in l - for l in lines) - self.assertTrue(find_kind_ba2, "Unable to find the species line " - "for Ba2") - - found1 = False - found2 = False - for l in lines: - if 'starting_magnetization(1)' in l: - if found1: - raise ValueError( - "starting_magnetization(1) found multiple times") - found1 = True - self.assertAlmostEquals( - float(l.split('=')[1].replace('d', 'e')), 0.5) - if 'starting_magnetization(2)' in l: - if found2: - raise ValueError( - "starting_magnetization(2) found multiple times") - found2 = True - self.assertAlmostEquals( - float(l.split('=')[1].replace('d', 'e')), -0.5) - - - - - - - - + pass diff --git a/aiida/backends/djsite/db/subtests/quantumespressopwimmigrant.py b/aiida/backends/djsite/db/subtests/quantumespressopwimmigrant.py index 550a4cb330..cd938f547c 100644 --- a/aiida/backends/djsite/db/subtests/quantumespressopwimmigrant.py +++ b/aiida/backends/djsite/db/subtests/quantumespressopwimmigrant.py @@ -1,42 +1,18 @@ # -*- coding: utf-8 -*- """ -Tests for the pwimmigrant plugin for Quantum Espresso. - -The directory, ``./pwtestjobs/``, contains small QE jobs that are used to test -the parsing and units conversion of an immigration. 
The test jobs should all -contain the same structure, input parameters, ect., but the units of the -input files differ, in order to test the unit and coordinate transformations -of the PwInputTools methods. The only thing that should vary between some of -them is the type of k-points (manual, gamma, and automatic). For this -reason, their are three separate tests. - -Note: It was necessary to break these tests up into individual classes to -prevent the SQL database from being overloaded [resulting in the error -"DatabaseError: too many SQL variables"]. Breaking up the tests into -individual classes causes the setUpClass and tearDownClass methods to be -called for each group of test jabs. - -In each test, the group of test jobs with the same type of kpoints are -immigrated to create a PwimmigrantCalculation node. The inherited method, -``submit_test``, is used to create an Aiida-standard input file in a temporary -directory. The text contained in this input is read in and later compared -against the input texts of the other jobs in the group. These input texts -should match identically, with the exception of small deviations in the -numerical values contained within. - -The daemon process, ``retrieve_jobs``, is called upon immigration of the group -of jobs, in order to test the correct preparation of the PwimmigrantCalculation. +Tests for the pwimmigrant plugin for Quantum Espresso specific to Django """ + # TODO: Test exception handling of user errors. -import os from aiida.backends.djsite.db.testbase import AiidaTestCase -from aiida.orm.calculation.job.quantumespresso.pwimmigrant import PwimmigrantCalculation -from aiida.daemon.execmanager import retrieve_jobs -from aiida.common.folders import SandboxFolder from aiida.orm.code import Code from aiida.backends.djsite.db.models import DbAuthInfo -from aiida.tools.codespecific.quantumespresso.pwinputparser import str2val + +#Tests imports +from aiida.backends.tests.quantumespressopwimmigrant import LocalSetup, \ + TestPwImmigrantCalculationAutomatic, TestPwImmigrantCalculationGamma, \ + TestPwImmigrantCalculationManual __copyright__ = u"Copyright (c), This file is part of the AiiDA platform. For further information please visit http://www.aiida.net/. All rights reserved." __license__ = "MIT license, see LICENSE.txt file." @@ -44,18 +20,11 @@ __authors__ = "The AiiDA team." -# Define the path to the directory containing the test PW runs. -TEST_JOB_DIR = os.path.join(os.path.dirname(__file__), 'pwtestjobs') - -# Get the prefixes of all the test jobs. The prefix defines the input and -# output file names. -PEFIXES = [fnm.strip('.in') for fnm in os.listdir(TEST_JOB_DIR) - if fnm.endswith('.in')] - - -class LocalTestCase(AiidaTestCase): +class LocalTestCase(AiidaTestCase, LocalSetup): """ - AiidaTesetCase subclass that uses local transport and defs helper methods. + AiidaTesetCase subclass specific to Django. + It uses local transport and + defs helper methods. Also sets up authinfo, so calcs can be retrieved and parsed, and sets up a code, so test submissions can be run. @@ -76,187 +45,23 @@ def setUpClass(cls): # Set up a code linked to cls.computer. The path is just a fake string. cls.code = Code(remote_computer_exec=(cls.computer, '/x.x')).store() - - def run_tests_on_calcs_with_prefixes(self, prefixes): - """ - Test immigration, retrieval, and parsing of calcs for all prefixes. - - Prefixes should be a group of prefixes that refer to calculations whose - Aiida-generated input files should be identical. 
- - :param prefixes: A group of prefixes that refer to calculations whose - Aiida-generated input files should be identical. - :type prefixes: list of str - """ - - # Get the computer's transport and create instance. - Transport = self.computer.get_transport_class() - transport = Transport() - - # Initialize arrays for storing data for each job. - inpt_txts = [] - - # Open the transport for the duration of immigrations, so it's not - # reopened for each one. This would really matter for ssh tranports. - with transport as t: - # Loop over all manual prefixes. - for prefix in prefixes: - - # Define the calc's initialization parameters. These result in - # calling of the `set_` methods with the specified values. - init_params = { - 'computer': self.computer, - 'resources': {'num_machines': 1, - 'num_mpiprocs_per_machine': 1}, - 'remote_workdir': TEST_JOB_DIR, - 'input_file_name': prefix + '.in', - 'output_file_name': prefix + '.out' - } - # Initialize the calculation using the `set_` methods. - calc = PwimmigrantCalculation(**init_params) - # Set the code. - calc.use_code(self.code) - - # Create the input nodes. - try: - calc.create_input_nodes(t) # Open transport passed. - except Exception as error: - self.fail( - "Error creating input nodes for prefix '{}':\n{}\n\n" - "".format(prefix, error) - ) - - # Submit a test submission in a temporary directory and store - # the input file's contents. Need to do this before now, - # because calc's state is NEW. - with SandboxFolder() as folder: - # Submit test and get the subfolder containing the input - # file. - subfolder = calc.submit_test(folder, prefix)[0] - # Get the path of the input file. - inpt_path = os.path.join(subfolder.abspath, prefix + '.in') - # Open the input file, read and store it's contents. - with open(inpt_path) as f: - inpt_txts.append(f.read()) - # Prepare the calc for retrieval and parsing. - calc.prepare_for_retrieval_and_parsing(transport) - # Call the daemon's retrieval function, so all immigrated calcs get - # retrieved and parsed. - try: - retrieve_jobs() - except Exception as error: - self.fail("Error during retrieval of immigrated calcs:\n{}\n\n" - "".format(error) - ) - # Test the create_input_nodes method by comparing the input files - # generated above by the submit_test method. The first input file - # will serve as the reference. - ref_words = inpt_txts[0].split() - for txt, prefix in zip(inpt_txts[1:], prefixes[1:]): - # Loop over the words of the reference and current input files. - for w1, w2 in zip(ref_words, txt.split()): - - # If the words are not the same, and the reference word is - # not the calculation's prefix parameter... - if w2 != w1 and w1.strip("'") not in prefixes: - - # Try using the regex-based str2val function of - # pwinputparser to convert the word strings into python - # values. - try: - val1, val2 = [str2val(x) for x in (w1, w2)] - except Exception as error: - self.fail( - "The strings, '{}' and '{}', of the submit_test " - "input files for calcs with prefixes {} and {} " - "were not equal and could not be converted to " - "python values using the str2val function of " - "pwinputparser.\nThe exception thrown was:\n{" - "}\n\n".format( - w1, w2, prefixes[0], prefix, error - ) - ) - - # If both values were converted to floats... - if all([type(v) is float for v in val1, val2]): - # Test if they differ by more than a specified - # tolerance. 
- self.assertAlmostEqual( - val1, val2, 4, - msg="The values, {} and {}, of the submit_test " - "input files for calcs with prefixes {} and {} " - "are not within the specified number of " - "decimal places." - "".format( - val1, val2, prefixes[0], prefix - ) - ) - - # If they weren't floats, then they should have been - # identical, so the test fails. - else: - self.assertEqual( - val1, val2, - msg="The values, {} and {}, of the submit_test " - "input files for calcs with prefixes {} and {} " - "did not match. They should have been " - "identical!".format( - val1, val2, prefixes[0], prefix - ) - ) - - -class TestPwImmigrantCalculationManual(LocalTestCase): +class TestPwImmigrantCalculationManualDjango(LocalTestCase, TestPwImmigrantCalculationManual): """ - Tests for immigration, retrieval, and parsing of manual kpoint jobs. """ + pass - def test_manual(self): - """ - Test immigration, retrieval, and parsing of manual kpoint jobs. - """ - - # Filter out all prefixes with manually specified kpoints. - manual_prefixes = filter( - lambda x: 'automatic' not in x and 'gamma' not in x, PEFIXES - ) - - # Test this group of prefixes. - self.run_tests_on_calcs_with_prefixes(manual_prefixes) - - -class TestPwImmigrantCalculationAutomatic(LocalTestCase): +class TestPwImmigrantCalculationAutomaticDjango(LocalTestCase, + TestPwImmigrantCalculationAutomatic): """ - Tests for immigration, retrieval, and parsing of automatic kpoint jobs. """ + pass - def test_automatic(self): - """ - Test immigration, retrieval, and parsing of automatic kpoint jobs. - """ - - # Filter out all prefixes with automatic kpoints. - automatic_prefixes = filter(lambda x: 'automatic' in x, PEFIXES) - - # Test this group of prefixes. - self.run_tests_on_calcs_with_prefixes(automatic_prefixes) - -class TestPwImmigrantCalculationGamma(LocalTestCase): +class TestPwImmigrantCalculationGammaDjango(LocalTestCase, + TestPwImmigrantCalculationGamma): """ - Tests for immigration, retrieval, and parsing of gamma kpoint jobs. """ - - def test_gamma(self): - """ - Test immigration, retrieval, and parsing of gamma kpoint jobs. - """ - - # Filter out all prefixes with gamma kpoints. - gamma_prefixes = filter(lambda x: 'gamma' in x, PEFIXES) - - # Test this group of prefixes. - self.run_tests_on_calcs_with_prefixes(gamma_prefixes) + pass \ No newline at end of file diff --git a/aiida/backends/sqlalchemy/tests/quantumespressopw.py b/aiida/backends/sqlalchemy/tests/quantumespressopw.py new file mode 100644 index 0000000000..407960dc90 --- /dev/null +++ b/aiida/backends/sqlalchemy/tests/quantumespressopw.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +""" +Tests for the pw input plugin specific to sqlalchemy +""" + +from aiida.backends.sqlalchemy.tests.testbase import SqlAlchemyTests +from aiida.backends.tests.quantumespressopw import TestQEPWInputGeneration +from aiida.orm.code import Code + +__copyright__ = u"Copyright (c), This file is part of the AiiDA platform. For " \ + u"further information please visit http://www.aiida.net/. All " \ + u"rights reserved." +__license__ = "MIT license, see LICENSE.txt file." +__version__ = "0.7.0" +__authors__ = "The AiiDA team." 
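For orientation, the backend-separation pattern used throughout this patch series can be summarised in a small, self-contained sketch (plain Python; the names below are invented for illustration and are not part of AiiDA): the backend-agnostic checks live in a mixin that is not itself a TestCase, and each backend obtains a runnable test class by combining that mixin with its own TestCase subclass.

import unittest


class PwInputChecksMixin(object):
    """Backend-agnostic assertions; not a TestCase, so never collected alone."""

    def test_code_is_configured(self):
        # 'code' is expected to be provided by the backend base class.
        self.assertEqual(self.code, '/x.x')


class FakeBackendTestCase(unittest.TestCase):
    """Stands in for SqlAlchemyTests or the Django AiidaTestCase."""

    @classmethod
    def setUpClass(cls):
        super(FakeBackendTestCase, cls).setUpClass()
        # A real backend test case would store a Code node here.
        cls.code = '/x.x'


class TestPwInputFakeBackend(FakeBackendTestCase, PwInputChecksMixin):
    """Runnable combination: backend TestCase first, then the shared mixin."""
    pass


if __name__ == '__main__':
    unittest.main()

Because the mixin does not inherit from unittest.TestCase, a test runner never collects it on its own; only the combined, backend-specific class is executed.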
+
+
+class QETestCase(SqlAlchemyTests):
+    @classmethod
+    def setUpClass(cls):
+        super(QETestCase, cls).setUpClass()
+        cls.calc_params = {
+            'computer': cls.computer,
+            'resources': {
+                'num_machines': 1,
+                'num_mpiprocs_per_machine': 1
+            }
+        }
+        cls.code = Code(remote_computer_exec=(cls.computer, '/x.x'))
+        cls.code.store()
+
+
+class TestQEPWInputGenerationSqla(QETestCase, TestQEPWInputGeneration):
+    """
+    tests that are specific to Django
+    """
+    pass
+
diff --git a/aiida/backends/sqlalchemy/tests/quantumespressopwimmigrant.py b/aiida/backends/sqlalchemy/tests/quantumespressopwimmigrant.py
new file mode 100644
index 0000000000..2fa5ad46b7
--- /dev/null
+++ b/aiida/backends/sqlalchemy/tests/quantumespressopwimmigrant.py
@@ -0,0 +1,63 @@
+# -*- coding: utf-8 -*-
+"""
+Tests for the pwimmigrant plugin for Quantum Espresso specific to SQLAlchemy
+"""
+
+# TODO: Test exception handling of user errors.
+from aiida.backends.sqlalchemy.tests.testbase import SqlAlchemyTests
+from aiida.orm.code import Code
+from aiida.backends.sqlalchemy.models.authinfo import DbAuthInfo
+
+#Tests imports
+from aiida.backends.tests.quantumespressopwimmigrant import LocalSetup, \
+    TestPwImmigrantCalculationAutomatic, TestPwImmigrantCalculationGamma, \
+    TestPwImmigrantCalculationManual
+
+__copyright__ = u"Copyright (c), This file is part of the AiiDA platform. For further information please visit http://www.aiida.net/. All rights reserved."
+__license__ = "MIT license, see LICENSE.txt file."
+__version__ = "0.7.0"
+__authors__ = "The AiiDA team."
+
+
+class LocalTestCase(SqlAlchemyTests, LocalSetup):
+    """
+    SqlAlchemyTests subclass that uses local transport and defines helper methods.
+
+    Also sets up authinfo, so calcs can be retrieved and parsed, and sets up a
+    code, so test submissions can be run.
+    """
+
+    @classmethod
+    def setUpClass(cls):
+        super(LocalTestCase, cls).setUpClass()
+
+        # Change transport type to local.
+        cls.computer.set_transport_type('local')
+
+        # Configure authinfo for cls.computer and cls.user.
+        authinfo = DbAuthInfo(dbcomputer=cls.computer.dbcomputer,
+                              aiidauser=cls.user)
+        authinfo.set_auth_params({})
+        authinfo.save()
+
+        # Set up a code linked to cls.computer. The path is just a fake string.
+ cls.code = Code(remote_computer_exec=(cls.computer, '/x.x')).store() + +class TestPwImmigrantCalculationManualSqla(LocalTestCase, + TestPwImmigrantCalculationManual): + """ + """ + pass + +class TestPwImmigrantCalculationAutomaticSqla(LocalTestCase, + TestPwImmigrantCalculationAutomatic): + """ + """ + pass + + +class TestPwImmigrantCalculationGammaSqla(LocalTestCase, + TestPwImmigrantCalculationGamma): + """ + """ + pass \ No newline at end of file diff --git a/aiida/backends/sqlalchemy/tests/test_runner.py b/aiida/backends/sqlalchemy/tests/test_runner.py index 4b3104162d..09add34ea5 100644 --- a/aiida/backends/sqlalchemy/tests/test_runner.py +++ b/aiida/backends/sqlalchemy/tests/test_runner.py @@ -21,10 +21,12 @@ def find_classes(module_str): def run_tests(): modules_str = [ - "aiida.backends.sqlalchemy.tests.query", + # "aiida.backends.sqlalchemy.tests.query", # "aiida.backends.sqlalchemy.tests.nodes", # "aiida.backends.sqlalchemy.tests.backup_script", # "aiida.backends.sqlalchemy.tests.export_and_import", + "aiida.backends.sqlalchemy.tests.quantumespressopw", + "aiida.backends.sqlalchemy.tests.quantumespressopwimmigrant" ] for module_str in modules_str: # Dynamically importing the module that interests us diff --git a/aiida/backends/sqlalchemy/tests/testbase.py b/aiida/backends/sqlalchemy/tests/testbase.py index 86249d0147..afc0472209 100644 --- a/aiida/backends/sqlalchemy/tests/testbase.py +++ b/aiida/backends/sqlalchemy/tests/testbase.py @@ -70,11 +70,15 @@ def setUpClass(cls, initial_data=True): cls.user = DbUser(get_configured_user_email(), "foo", "bar", "tests") cls.test_session.add(cls.user) cls.test_session.commit() + else: + cls.user = has_user has_computer = DbComputer.query.filter(DbComputer.hostname == 'localhost').first() if not has_computer: cls.computer = SqlAlchemyTests._create_computer() cls.computer.store() + else: + cls.computer = has_computer @staticmethod diff --git a/aiida/backends/tests/quantumespressopw.py b/aiida/backends/tests/quantumespressopw.py index 81934f4400..e37f478c8f 100644 --- a/aiida/backends/tests/quantumespressopw.py +++ b/aiida/backends/tests/quantumespressopw.py @@ -11,16 +11,17 @@ - empty namelists - content for non-existent namelists specified """ + import os -from aiida.backends.djsite.db.testbase import AiidaTestCase -from aiida.orm import CalculationFactory, DataFactory -from aiida.orm.code import Code -from aiida.common.folders import SandboxFolder -from aiida.common.exceptions import InputValidationError import aiida +from aiida.common.exceptions import InputValidationError +from aiida.common.folders import SandboxFolder +from aiida.orm import CalculationFactory, DataFactory -__copyright__ = u"Copyright (c), This file is part of the AiiDA platform. For further information please visit http://www.aiida.net/. All rights reserved." +__copyright__ = u"Copyright (c), This file is part of the AiiDA platform. For " \ + u"further information please visit http://www.aiida.net/. All " \ + u"rights reserved." __license__ = "MIT license, see LICENSE.txt file." __version__ = "0.7.0" __authors__ = "The AiiDA team." 
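The testbase.py hunk above turns SqlAlchemyTests.setUpClass into a get-or-create: an existing DbUser or DbComputer is reused when the database already contains one, and a new row is created only otherwise. A generic sketch of that idiom for an arbitrary SQLAlchemy model (the helper name and signature are illustrative, not an AiiDA API) could look like this:

def get_or_create(session, model, defaults=None, **filters):
    """Return (instance, created): reuse a matching row or insert a new one."""
    instance = session.query(model).filter_by(**filters).first()
    if instance is not None:
        return instance, False
    params = dict(filters, **(defaults or {}))
    instance = model(**params)
    session.add(instance)
    session.commit()
    return instance, True

With such a helper, each of the if/else blocks in setUpClass would collapse to a single call per entity.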
@@ -32,21 +33,7 @@ KpointsData = DataFactory('array.kpoints') -class QETestCase(AiidaTestCase): - @classmethod - def setUpClass(cls): - super(QETestCase, cls).setUpClass() - cls.calc_params = { - 'computer': cls.computer, - 'resources': { - 'num_machines': 1, - 'num_mpiprocs_per_machine': 1} - } - cls.code = Code(remote_computer_exec=(cls.computer, '/x.x')).store() - - - -class TestQEPWInputGeneration(QETestCase): +class TestQEPWInputGeneration(): """ Test if the input is correctly generated """ @@ -104,7 +91,7 @@ def test_inputs(self): logging.disable(logging.NOTSET) inputdict = c.get_inputs_dict() - inputdict.pop('code',None) + inputdict.pop('code', None) with SandboxFolder() as f: # I use the same SandboxFolder more than once because nothing @@ -157,12 +144,13 @@ def test_inputs_with_multiple_species(self): ## I leave this as a reference, but I use instead the ## append_atom method # from aiida.orm.data.structure import Kind, Site - #s.append_kind(Kind(symbols='Ba', name='Ba1')) - #s.append_kind(Kind(symbols='Ba', name='Ba2')) - #s.append_site(Site(kind_name='Ba1', position=[0.,0.,0.])) - #s.append_site(Site(kind_name='Ba2', position=[1.4355,1.4355,1.4355])) + # s.append_kind(Kind(symbols='Ba', name='Ba1')) + # s.append_kind(Kind(symbols='Ba', name='Ba2')) + # s.append_site(Site(kind_name='Ba1', position=[0.,0.,0.])) + # s.append_site(Site(kind_name='Ba2', position=[1.4355,1.4355,1.4355])) s.append_atom(symbols='Ba', position=[0., 0., 0.], name='Ba1') - s.append_atom(symbols='Ba', position=[1.4355, 1.4355, 1.4355], name='Ba2') + s.append_atom(symbols='Ba', position=[1.4355, 1.4355, 1.4355], + name='Ba2') input_params = { 'CONTROL': { @@ -174,8 +162,10 @@ def test_inputs_with_multiple_species(self): 'ecutwfc': 47., 'ecutrho': 568., 'nspin': 2, - 'starting_magnetization': {'Ba1': 0.5, - 'Ba2': -0.5}, + 'starting_magnetization': { + 'Ba1': 0.5, + 'Ba2': -0.5 + }, }, 'ELECTRONS': { 'conv_thr': 1.e-10, @@ -254,11 +244,3 @@ def test_inputs_with_multiple_species(self): found2 = True self.assertAlmostEquals( float(l.split('=')[1].replace('d', 'e')), -0.5) - - - - - - - - diff --git a/aiida/backends/tests/quantumespressopwimmigrant.py b/aiida/backends/tests/quantumespressopwimmigrant.py index 550a4cb330..e41889f6de 100644 --- a/aiida/backends/tests/quantumespressopwimmigrant.py +++ b/aiida/backends/tests/quantumespressopwimmigrant.py @@ -30,12 +30,9 @@ # TODO: Test exception handling of user errors. import os -from aiida.backends.djsite.db.testbase import AiidaTestCase from aiida.orm.calculation.job.quantumespresso.pwimmigrant import PwimmigrantCalculation from aiida.daemon.execmanager import retrieve_jobs from aiida.common.folders import SandboxFolder -from aiida.orm.code import Code -from aiida.backends.djsite.db.models import DbAuthInfo from aiida.tools.codespecific.quantumespresso.pwinputparser import str2val __copyright__ = u"Copyright (c), This file is part of the AiiDA platform. For further information please visit http://www.aiida.net/. All rights reserved." @@ -53,30 +50,10 @@ if fnm.endswith('.in')] -class LocalTestCase(AiidaTestCase): +class LocalSetup(): """ - AiidaTesetCase subclass that uses local transport and defs helper methods. - - Also sets up authinfo, so calcs can be retrieved and parsed, and sets up a - code, so test submissions can be run. + Setup functions that are common to all backends """ - - @classmethod - def setUpClass(cls): - super(LocalTestCase, cls).setUpClass() - - # Change transport type to local. 
- cls.computer.set_transport_type('local') - - # Configure authinfo for cls.computer and cls.user. - authinfo = DbAuthInfo(dbcomputer=cls.computer.dbcomputer, - aiidauser=cls.user) - authinfo.set_auth_params({}) - authinfo.save() - - # Set up a code linked to cls.computer. The path is just a fake string. - cls.code = Code(remote_computer_exec=(cls.computer, '/x.x')).store() - def run_tests_on_calcs_with_prefixes(self, prefixes): """ Test immigration, retrieval, and parsing of calcs for all prefixes. @@ -209,7 +186,7 @@ def run_tests_on_calcs_with_prefixes(self, prefixes): ) -class TestPwImmigrantCalculationManual(LocalTestCase): +class TestPwImmigrantCalculationManual(): """ Tests for immigration, retrieval, and parsing of manual kpoint jobs. """ @@ -228,7 +205,7 @@ def test_manual(self): self.run_tests_on_calcs_with_prefixes(manual_prefixes) -class TestPwImmigrantCalculationAutomatic(LocalTestCase): +class TestPwImmigrantCalculationAutomatic(): """ Tests for immigration, retrieval, and parsing of automatic kpoint jobs. """ @@ -245,7 +222,7 @@ def test_automatic(self): self.run_tests_on_calcs_with_prefixes(automatic_prefixes) -class TestPwImmigrantCalculationGamma(LocalTestCase): +class TestPwImmigrantCalculationGamma(): """ Tests for immigration, retrieval, and parsing of gamma kpoint jobs. """ diff --git a/aiida/orm/implementation/sqlalchemy/node.py b/aiida/orm/implementation/sqlalchemy/node.py index ecf54c907a..6531544160 100644 --- a/aiida/orm/implementation/sqlalchemy/node.py +++ b/aiida/orm/implementation/sqlalchemy/node.py @@ -69,7 +69,8 @@ def __init__(self, **kwargs): else: # TODO: allow to get the user from the parameters user = get_automatic_user() - self._dbnode = DbNode(user=user, + + self._dbnode = DbNode(user_id=user.id, uuid=get_new_uuid(), type=self._plugin_type_string) From f8d8b9c044df98d541d2bdbdb6f2b2c13297757c Mon Sep 17 00:00:00 2001 From: Fernando Gargiulo Date: Fri, 2 Dec 2016 18:39:12 +0100 Subject: [PATCH 5/5] qepw and qepwimmigrant test working for both django and sqlalchemy. expire_on_commit=False in the SqlaTestCase class --- .../sqlalchemy/tests/quantumespressopw.py | 18 +++++++++++------- .../tests/quantumespressopwimmigrant.py | 8 +++++--- aiida/backends/sqlalchemy/tests/test_runner.py | 9 +++++---- aiida/backends/sqlalchemy/tests/testbase.py | 7 ++++--- .../tests/quantumespressopwimmigrant.py | 8 ++++---- aiida/orm/implementation/django/node.py | 13 ++++++------- aiida/orm/implementation/sqlalchemy/code.py | 1 + aiida/orm/implementation/sqlalchemy/node.py | 5 ++++- 8 files changed, 40 insertions(+), 29 deletions(-) diff --git a/aiida/backends/sqlalchemy/tests/quantumespressopw.py b/aiida/backends/sqlalchemy/tests/quantumespressopw.py index 407960dc90..b235a00a9a 100644 --- a/aiida/backends/sqlalchemy/tests/quantumespressopw.py +++ b/aiida/backends/sqlalchemy/tests/quantumespressopw.py @@ -15,10 +15,16 @@ __authors__ = "The AiiDA team." 
-class QETestCase(SqlAlchemyTests): +class TestQEPWInputGenerationSqla(SqlAlchemyTests, TestQEPWInputGeneration): + """ + tests that are specific to Django + """ + + #The setupClass is overwritten to add specific objects @classmethod def setUpClass(cls): - super(QETestCase, cls).setUpClass() + super(TestQEPWInputGenerationSqla, cls).setUpClass() + cls.calc_params = { 'computer': cls.computer, 'resources': { @@ -26,13 +32,11 @@ def setUpClass(cls): 'num_mpiprocs_per_machine': 1 } } - cls.code = Code(remote_computer_exec=(cls.computer, '/x.x')) + + cls.code = Code() + cls.code.set_remote_computer_exec((cls.computer, '/x.x')) cls.code.store() -class TestQEPWInputGenerationSqla(QETestCase, TestQEPWInputGeneration): - """ - tests that are specific to Django - """ pass diff --git a/aiida/backends/sqlalchemy/tests/quantumespressopwimmigrant.py b/aiida/backends/sqlalchemy/tests/quantumespressopwimmigrant.py index 2fa5ad46b7..d3ef95b454 100644 --- a/aiida/backends/sqlalchemy/tests/quantumespressopwimmigrant.py +++ b/aiida/backends/sqlalchemy/tests/quantumespressopwimmigrant.py @@ -7,6 +7,7 @@ from aiida.backends.sqlalchemy.tests.testbase import SqlAlchemyTests from aiida.orm.code import Code from aiida.backends.sqlalchemy.models.authinfo import DbAuthInfo +import aiida.backends.sqlalchemy #Tests imports from aiida.backends.tests.quantumespressopwimmigrant import LocalSetup, \ @@ -31,7 +32,7 @@ class LocalTestCase(SqlAlchemyTests, LocalSetup): def setUpClass(cls): super(LocalTestCase, cls).setUpClass() - # Change transport type to local. + # Change transport type to local cls.computer.set_transport_type('local') # Configure authinfo for cls.computer and cls.user. @@ -40,8 +41,9 @@ def setUpClass(cls): authinfo.set_auth_params({}) authinfo.save() - # Set up a code linked to cls.computer. The path is just a fake string. - cls.code = Code(remote_computer_exec=(cls.computer, '/x.x')).store() + cls.code = Code() + cls.code.set_remote_computer_exec((cls.computer, '/x.x')) + cls.code.store() class TestPwImmigrantCalculationManualSqla(LocalTestCase, TestPwImmigrantCalculationManual): diff --git a/aiida/backends/sqlalchemy/tests/test_runner.py b/aiida/backends/sqlalchemy/tests/test_runner.py index 09add34ea5..557d9427bb 100644 --- a/aiida/backends/sqlalchemy/tests/test_runner.py +++ b/aiida/backends/sqlalchemy/tests/test_runner.py @@ -21,12 +21,13 @@ def find_classes(module_str): def run_tests(): modules_str = [ - # "aiida.backends.sqlalchemy.tests.query", + # "aiida.backends.sqlalchemy.tests.query", # "aiida.backends.sqlalchemy.tests.nodes", # "aiida.backends.sqlalchemy.tests.backup_script", - # "aiida.backends.sqlalchemy.tests.export_and_import", - "aiida.backends.sqlalchemy.tests.quantumespressopw", - "aiida.backends.sqlalchemy.tests.quantumespressopwimmigrant" + # "aiida.backends.sqlalchemy.tests.export_and_import", + "aiida.backends.sqlalchemy.tests.nwchem", + "aiida.backends.sqlalchemy.tests.quantumespressopw", + "aiida.backends.sqlalchemy.tests.quantumespressopwimmigrant" ] for module_str in modules_str: # Dynamically importing the module that interests us diff --git a/aiida/backends/sqlalchemy/tests/testbase.py b/aiida/backends/sqlalchemy/tests/testbase.py index afc0472209..d424bf35f0 100644 --- a/aiida/backends/sqlalchemy/tests/testbase.py +++ b/aiida/backends/sqlalchemy/tests/testbase.py @@ -28,14 +28,14 @@ __authors__ = "The AiiDA team." 
__version__ = "0.7.0" -# Session = sessionmaker(expire_on_commit=False) -Session = sessionmaker(expire_on_commit=True) +Session = sessionmaker(expire_on_commit=False) +# Session = sessionmaker(expire_on_commit=True) class SqlAlchemyTests(unittest.TestCase): # Specify the need to drop the table at the beginning of a test case - drop_all = False + drop_all = True test_session = None @@ -91,6 +91,7 @@ def _create_computer(**kwargs): defaults.update(kwargs) return Computer(**defaults) + @staticmethod def inject_computer(f): @functools.wraps(f) diff --git a/aiida/backends/tests/quantumespressopwimmigrant.py b/aiida/backends/tests/quantumespressopwimmigrant.py index e41889f6de..b5af097fa6 100644 --- a/aiida/backends/tests/quantumespressopwimmigrant.py +++ b/aiida/backends/tests/quantumespressopwimmigrant.py @@ -50,7 +50,7 @@ if fnm.endswith('.in')] -class LocalSetup(): +class LocalSetup(object): """ Setup functions that are common to all backends """ @@ -186,7 +186,7 @@ def run_tests_on_calcs_with_prefixes(self, prefixes): ) -class TestPwImmigrantCalculationManual(): +class TestPwImmigrantCalculationManual(object): """ Tests for immigration, retrieval, and parsing of manual kpoint jobs. """ @@ -205,7 +205,7 @@ def test_manual(self): self.run_tests_on_calcs_with_prefixes(manual_prefixes) -class TestPwImmigrantCalculationAutomatic(): +class TestPwImmigrantCalculationAutomatic(object): """ Tests for immigration, retrieval, and parsing of automatic kpoint jobs. """ @@ -222,7 +222,7 @@ def test_automatic(self): self.run_tests_on_calcs_with_prefixes(automatic_prefixes) -class TestPwImmigrantCalculationGamma(): +class TestPwImmigrantCalculationGamma(object): """ Tests for immigration, retrieval, and parsing of gamma kpoint jobs. """ diff --git a/aiida/orm/implementation/django/node.py b/aiida/orm/implementation/django/node.py index 2857a79276..6f359de3d3 100644 --- a/aiida/orm/implementation/django/node.py +++ b/aiida/orm/implementation/django/node.py @@ -2,20 +2,19 @@ import copy +from django.core.exceptions import ObjectDoesNotExist from django.db import IntegrityError, transaction from django.db.models import F -from django.core.exceptions import ObjectDoesNotExist -from aiida.orm.implementation.general.node import AbstractNode, _NO_DEFAULT +from aiida.backends.djsite.db.models import DbLink +from aiida.backends.djsite.utils import get_automatic_user from aiida.common.exceptions import (InternalError, ModificationNotAllowed, NotExistent, UniquenessError) -from aiida.common.utils import get_new_uuid from aiida.common.folders import RepositoryFolder -from aiida.common.links import LinkType from aiida.common.lang import override - -from aiida.backends.djsite.utils import get_automatic_user -from aiida.backends.djsite.db.models import DbLink +from aiida.common.links import LinkType +from aiida.common.utils import get_new_uuid +from aiida.orm.implementation.general.node import AbstractNode, _NO_DEFAULT __copyright__ = u"Copyright (c), This file is part of the AiiDA platform. For further information please visit http://www.aiida.net/. All rights reserved." __license__ = "MIT license, see LICENSE.txt file." diff --git a/aiida/orm/implementation/sqlalchemy/code.py b/aiida/orm/implementation/sqlalchemy/code.py index eda5fd7aaf..98487ddcf1 100644 --- a/aiida/orm/implementation/sqlalchemy/code.py +++ b/aiida/orm/implementation/sqlalchemy/code.py @@ -106,6 +106,7 @@ def set_remote_computer_exec(self, remote_computer_exec): remote_exec_path is the absolute path of the main executable on remote computer. 
""" + if (not isinstance(remote_computer_exec, (list, tuple)) or len(remote_computer_exec) != 2): raise ValueError("remote_computer_exec must be a list or tuple " diff --git a/aiida/orm/implementation/sqlalchemy/node.py b/aiida/orm/implementation/sqlalchemy/node.py index 6531544160..a8250c4c67 100644 --- a/aiida/orm/implementation/sqlalchemy/node.py +++ b/aiida/orm/implementation/sqlalchemy/node.py @@ -693,7 +693,10 @@ def store(self, with_transaction=True): self._store_cached_input_links(with_transaction=False) if with_transaction: - self.dbnode.session.commit() + try: + self.dbnode.session.commit() + except SQLAlchemyError as e: + self.dbnode.session.rollback() # This is one of the few cases where it is ok to do a 'global' # except, also because I am re-raising the exception