From 8e9532d667948e435fcb4f80faf37fccd6c4737e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 14 Jan 2025 13:51:10 +0800 Subject: [PATCH] [pre-commit.ci] pre-commit autoupdate (#1703) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.8.4 → v0.9.1](https://github.com/astral-sh/ruff-pre-commit/compare/v0.8.4...v0.9.1) --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- dpgen/auto_test/lib/abacus.py | 18 ++-- dpgen/auto_test/reproduce.py | 12 +-- dpgen/data/gen.py | 26 +++--- dpgen/data/surf.py | 6 +- dpgen/generator/lib/abacus_scf.py | 117 ++++++++++++------------- dpgen/generator/lib/calypso_run_opt.py | 6 +- dpgen/generator/lib/utils.py | 6 +- dpgen/generator/run.py | 56 ++++++------ tests/data/test_coll_abacus.py | 6 +- tests/generator/test_make_md.py | 4 +- tests/generator/test_post_fp.py | 48 +++++----- 12 files changed, 152 insertions(+), 155 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index acf546bda..fc68ebde7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -28,7 +28,7 @@ repos: # Python - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.8.4 + rev: v0.9.1 hooks: - id: ruff args: ["--fix"] diff --git a/dpgen/auto_test/lib/abacus.py b/dpgen/auto_test/lib/abacus.py index afdbda556..6d097dbaa 100644 --- a/dpgen/auto_test/lib/abacus.py +++ b/dpgen/auto_test/lib/abacus.py @@ -161,25 +161,25 @@ def poscar2stru(poscar, inter_param, stru="STRU"): else: atom_mass_dict = inter_param["atom_masses"] for atom in stru_data["atom_names"]: - assert ( - atom in atom_mass_dict - ), f"the mass of {atom} is not defined in interaction:atom_masses" + assert atom in atom_mass_dict, ( + f"the mass of {atom} is not defined in interaction:atom_masses" + ) atom_mass.append(atom_mass_dict[atom]) if "potcars" in inter_param: pseudo = [] for atom in stru_data["atom_names"]: - assert ( - atom in inter_param["potcars"] - ), f"the pseudopotential of {atom} is not defined in interaction:potcars" + assert atom in inter_param["potcars"], ( + f"the pseudopotential of {atom} is not defined in interaction:potcars" + ) pseudo.append("./pp_orb/" + inter_param["potcars"][atom].split("/")[-1]) if "orb_files" in inter_param: orb = [] for atom in stru_data["atom_names"]: - assert ( - atom in inter_param["orb_files"] - ), f"orbital file of {atom} is not defined in interaction:orb_files" + assert atom in inter_param["orb_files"], ( + f"orbital file of {atom} is not defined in interaction:orb_files" + ) orb.append("./pp_orb/" + inter_param["orb_files"][atom].split("/")[-1]) if "deepks_desc" in inter_param: diff --git a/dpgen/auto_test/reproduce.py b/dpgen/auto_test/reproduce.py index a2b919a5d..87a14d202 100644 --- a/dpgen/auto_test/reproduce.py +++ b/dpgen/auto_test/reproduce.py @@ -34,9 +34,9 @@ def make_repro( init_data_task_todo = glob.glob( os.path.join(init_data_path_todo, "task.[0-9]*[0-9]") ) - assert ( - len(init_data_task_todo) > 0 - ), "There is no task in previous calculations path" + assert len(init_data_task_todo) > 0, ( + "There is no task in previous calculations path" + ) init_data_task_todo.sort() task_list = [] @@ -122,9 +122,9 @@ def post_repro( init_data_task_todo = glob.glob( os.path.join(init_data_path_todo, "task.[0-9]*[0-9]") ) - assert ( - len(init_data_task_todo) > 0 - ), "There is no task in 
previous calculations path" + assert len(init_data_task_todo) > 0, ( + "There is no task in previous calculations path" + ) init_data_task_todo.sort() idid = 0 diff --git a/dpgen/data/gen.py b/dpgen/data/gen.py index 3e6940a86..484749ed7 100644 --- a/dpgen/data/gen.py +++ b/dpgen/data/gen.py @@ -584,9 +584,9 @@ def make_abacus_relax(jdata, mdata): raise RuntimeError("Cannot find any k-points information.") else: relax_kpt_path = jdata["relax_kpt"] - assert os.path.isfile( - relax_kpt_path - ), f"file {relax_kpt_path} should exists" + assert os.path.isfile(relax_kpt_path), ( + f"file {relax_kpt_path} should exists" + ) else: gamma_param = {"k_points": [1, 1, 1, 0, 0, 0]} ret_kpt = make_abacus_scf_kpt(gamma_param) @@ -595,9 +595,9 @@ def make_abacus_relax(jdata, mdata): raise RuntimeError("Cannot find any k-points information.") else: relax_kpt_path = jdata["relax_kpt"] - assert os.path.isfile( - relax_kpt_path - ), f"file {relax_kpt_path} should exists" + assert os.path.isfile(relax_kpt_path), ( + f"file {relax_kpt_path} should exists" + ) out_dir = jdata["out_dir"] cwd = os.getcwd() @@ -779,10 +779,10 @@ def pert_scaled(jdata): ### Loop over each perturbation for kk in range(pert_numb): if fp_style == "vasp": - pos_in = f"POSCAR{kk+1}.vasp" + pos_in = f"POSCAR{kk + 1}.vasp" elif fp_style == "abacus": - pos_in = f"STRU{kk+1}.abacus" - dir_out = f"{kk+1:06d}" + pos_in = f"STRU{kk + 1}.abacus" + dir_out = f"{kk + 1:06d}" create_path(dir_out) if fp_style == "vasp": pos_out = os.path.join(dir_out, "POSCAR") @@ -814,7 +814,7 @@ def pert_scaled(jdata): pos_in = "POSCAR" elif fp_style == "abacus": pos_in = "STRU" - dir_out = f"{kk+1:06d}" + dir_out = f"{kk + 1:06d}" create_path(dir_out) if fp_style == "vasp": pos_out = os.path.join(dir_out, "POSCAR") @@ -928,9 +928,9 @@ def make_abacus_md(jdata, mdata): raise RuntimeError("Cannot find any k-points information.") else: md_kpt_path = jdata["md_kpt"] - assert os.path.isfile( - md_kpt_path - ), f"file {md_kpt_path} should exists" + assert os.path.isfile(md_kpt_path), ( + f"file {md_kpt_path} should exists" + ) else: ret_kpt = make_abacus_scf_kpt({"k_points": [1, 1, 1, 0, 0, 0]}) else: diff --git a/dpgen/data/surf.py b/dpgen/data/surf.py index 3fba6a14b..3af1e9d60 100644 --- a/dpgen/data/surf.py +++ b/dpgen/data/surf.py @@ -525,8 +525,8 @@ def pert_scaled(jdata): poscar_elong(poscar_in, "POSCAR", ll) sp.check_call(pert_cmd, shell=True) for kk in range(pert_numb): - pos_in = f"POSCAR{kk+1}.vasp" - dir_out = f"{kk+1:06d}" + pos_in = f"POSCAR{kk + 1}.vasp" + dir_out = f"{kk + 1:06d}" create_path(dir_out) pos_out = os.path.join(dir_out, "POSCAR") poscar_shuffle(pos_in, pos_out) @@ -535,7 +535,7 @@ def pert_scaled(jdata): ### Handle special case (unperturbed ?) 
kk = -1 pos_in = "POSCAR" - dir_out = f"{kk+1:06d}" + dir_out = f"{kk + 1:06d}" create_path(dir_out) pos_out = os.path.join(dir_out, "POSCAR") poscar_shuffle(pos_in, pos_out) diff --git a/dpgen/generator/lib/abacus_scf.py b/dpgen/generator/lib/abacus_scf.py index 376c64eb0..96262620e 100644 --- a/dpgen/generator/lib/abacus_scf.py +++ b/dpgen/generator/lib/abacus_scf.py @@ -45,13 +45,12 @@ def make_abacus_scf_input(fp_params, extra_file_path=""): fp_params["kspacing"] = [ float(i) for i in fp_params["kspacing"].split() ] - assert ( - len(fp_params["kspacing"]) - in [ - 1, - 3, - ] - ), "'kspacing' only accept a float, or a list of one or three float, or a string of one or three float" + assert len(fp_params["kspacing"]) in [ + 1, + 3, + ], ( + "'kspacing' only accept a float, or a list of one or three float, or a string of one or three float" + ) ret += "kspacing " for ikspacing in fp_params["kspacing"]: assert ikspacing >= 0, "'kspacing' should be non-negative." @@ -78,9 +77,9 @@ def make_abacus_scf_input(fp_params, extra_file_path=""): elif key == "gamma_only": if isinstance(fp_params["gamma_only"], str): fp_params["gamma_only"] = int(eval(fp_params["gamma_only"])) - assert ( - fp_params["gamma_only"] == 0 or fp_params["gamma_only"] == 1 - ), "'gamma_only' should be either 0 or 1." + assert fp_params["gamma_only"] == 0 or fp_params["gamma_only"] == 1, ( + "'gamma_only' should be either 0 or 1." + ) ret += "gamma_only %d\n" % fp_params["gamma_only"] # noqa: UP031 elif key == "mixing_type": assert fp_params["mixing_type"] in [ @@ -93,22 +92,22 @@ def make_abacus_scf_input(fp_params, extra_file_path=""): ret += "mixing_type {}\n".format(fp_params["mixing_type"]) elif key == "mixing_beta": fp_params["mixing_beta"] = float(fp_params["mixing_beta"]) - assert ( - fp_params["mixing_beta"] >= 0 and fp_params["mixing_beta"] < 1 - ), "'mixing_beta' should between 0 and 1." + assert fp_params["mixing_beta"] >= 0 and fp_params["mixing_beta"] < 1, ( + "'mixing_beta' should between 0 and 1." + ) ret += "mixing_beta {:f}\n".format(fp_params["mixing_beta"]) elif key == "symmetry": if isinstance(fp_params["symmetry"], str): fp_params["symmetry"] = int(eval(fp_params["symmetry"])) - assert ( - fp_params["symmetry"] == 0 or fp_params["symmetry"] == 1 - ), "'symmetry' should be either 0 or 1." + assert fp_params["symmetry"] == 0 or fp_params["symmetry"] == 1, ( + "'symmetry' should be either 0 or 1." + ) ret += "symmetry %d\n" % fp_params["symmetry"] # noqa: UP031 elif key == "nbands": fp_params["nbands"] = int(fp_params["nbands"]) - assert fp_params["nbands"] > 0 and isinstance( - fp_params["nbands"], int - ), "'nbands' should be a positive integer." + assert fp_params["nbands"] > 0 and isinstance(fp_params["nbands"], int), ( + "'nbands' should be a positive integer." + ) ret += "nbands %d\n" % fp_params["nbands"] # noqa: UP031 elif key == "nspin": fp_params["nspin"] = int(fp_params["nspin"]) @@ -119,51 +118,49 @@ def make_abacus_scf_input(fp_params, extra_file_path=""): ), "'nspin' can anly take 1, 2 or 4" ret += "nspin %d\n" % fp_params["nspin"] # noqa: UP031 elif key == "ks_solver": - assert ( - fp_params["ks_solver"] - in [ - "cg", - "dav", - "lapack", - "genelpa", - "hpseps", - "scalapack_gvx", - ] - ), "'ks_sover' should in 'cgx', 'dav', 'lapack', 'genelpa', 'hpseps', 'scalapack_gvx'." + assert fp_params["ks_solver"] in [ + "cg", + "dav", + "lapack", + "genelpa", + "hpseps", + "scalapack_gvx", + ], ( + "'ks_sover' should in 'cgx', 'dav', 'lapack', 'genelpa', 'hpseps', 'scalapack_gvx'." 
+ ) ret += "ks_solver {}\n".format(fp_params["ks_solver"]) elif key == "smearing_method": - assert ( - fp_params["smearing_method"] - in [ - "gauss", - "gaussian", - "fd", - "fixed", - "mp", - "mp2", - "mv", - ] - ), "'smearing_method' should in 'gauss', 'gaussian', 'fd', 'fixed', 'mp', 'mp2', 'mv'. " + assert fp_params["smearing_method"] in [ + "gauss", + "gaussian", + "fd", + "fixed", + "mp", + "mp2", + "mv", + ], ( + "'smearing_method' should in 'gauss', 'gaussian', 'fd', 'fixed', 'mp', 'mp2', 'mv'. " + ) ret += "smearing_method {}\n".format(fp_params["smearing_method"]) elif key == "smearing_sigma": fp_params["smearing_sigma"] = float(fp_params["smearing_sigma"]) - assert ( - fp_params["smearing_sigma"] >= 0 - ), "'smearing_sigma' should be non-negative." + assert fp_params["smearing_sigma"] >= 0, ( + "'smearing_sigma' should be non-negative." + ) ret += "smearing_sigma {:f}\n".format(fp_params["smearing_sigma"]) elif key == "cal_force": if isinstance(fp_params["cal_force"], str): fp_params["cal_force"] = int(eval(fp_params["cal_force"])) - assert ( - fp_params["cal_force"] == 0 or fp_params["cal_force"] == 1 - ), "'cal_force' should be either 0 or 1." + assert fp_params["cal_force"] == 0 or fp_params["cal_force"] == 1, ( + "'cal_force' should be either 0 or 1." + ) ret += "cal_force %d\n" % fp_params["cal_force"] # noqa: UP031 elif key == "cal_stress": if isinstance(fp_params["cal_stress"], str): fp_params["cal_stress"] = int(eval(fp_params["cal_stress"])) - assert ( - fp_params["cal_stress"] == 0 or fp_params["cal_stress"] == 1 - ), "'cal_stress' should be either 0 or 1." + assert fp_params["cal_stress"] == 0 or fp_params["cal_stress"] == 1, ( + "'cal_stress' should be either 0 or 1." + ) ret += "cal_stress %d\n" % fp_params["cal_stress"] # noqa: UP031 # paras for deepks elif key == "deepks_out_labels": @@ -180,16 +177,16 @@ def make_abacus_scf_input(fp_params, extra_file_path=""): fp_params["deepks_descriptor_lmax"] = int( fp_params["deepks_descriptor_lmax"] ) - assert ( - fp_params["deepks_descriptor_lmax"] >= 0 - ), "'deepks_descriptor_lmax' should be a positive integer." + assert fp_params["deepks_descriptor_lmax"] >= 0, ( + "'deepks_descriptor_lmax' should be a positive integer." + ) ret += "deepks_descriptor_lmax %d\n" % fp_params["deepks_descriptor_lmax"] # noqa: UP031 elif key == "deepks_scf": if isinstance(fp_params["deepks_scf"], str): fp_params["deepks_scf"] = int(eval(fp_params["deepks_scf"])) - assert ( - fp_params["deepks_scf"] == 0 or fp_params["deepks_scf"] == 1 - ), "'deepks_scf' should be either 0 or 1." + assert fp_params["deepks_scf"] == 0 or fp_params["deepks_scf"] == 1, ( + "'deepks_scf' should be either 0 or 1." 
+ ) ret += "deepks_scf %d\n" % fp_params["deepks_scf"] # noqa: UP031 elif key == "deepks_model": ret += "deepks_model {}\n".format( @@ -228,9 +225,9 @@ def make_abacus_scf_stru( ret = "ATOMIC_SPECIES\n" for iatom in range(len(atom_names)): - assert ( - atom_names[iatom] in type_map - ), f"element {atom_names[iatom]} is not defined in type_map" + assert atom_names[iatom] in type_map, ( + f"element {atom_names[iatom]} is not defined in type_map" + ) idx = type_map.index(atom_names[iatom]) if "atom_masses" not in sys_data: ret += ( diff --git a/dpgen/generator/lib/calypso_run_opt.py b/dpgen/generator/lib/calypso_run_opt.py index fc7cf899a..1bc765af1 100644 --- a/dpgen/generator/lib/calypso_run_opt.py +++ b/dpgen/generator/lib/calypso_run_opt.py @@ -94,9 +94,9 @@ def read_stress_fmax(): try: f = open("input.dat") except Exception: - assert os.path.exists( - "../input.dat" - ), f" now we are in {os.getcwd()}, do not find ../input.dat" + assert os.path.exists("../input.dat"), ( + f" now we are in {os.getcwd()}, do not find ../input.dat" + ) f = open("../input.dat") lines = f.readlines() f.close() diff --git a/dpgen/generator/lib/utils.py b/dpgen/generator/lib/utils.py index 9757d5368..68e0c6cb3 100644 --- a/dpgen/generator/lib/utils.py +++ b/dpgen/generator/lib/utils.py @@ -102,9 +102,9 @@ def symlink_user_forward_files(mdata, task_type, work_path, task_format=None): task_format = {"train": "0*", "model_devi": "task.*", "fp": "task.*"} # "init_relax" : "sys-*", "init_md" : "sys-*/scale*/00*" for file in user_forward_files: - assert os.path.isfile( - file - ), f"user_forward_file {file} of {task_type} stage doesn't exist. " + assert os.path.isfile(file), ( + f"user_forward_file {file} of {task_type} stage doesn't exist. " + ) tasks = glob.glob(os.path.join(work_path, task_format[task_type])) for task in tasks: if os.path.isfile(os.path.join(task, os.path.basename(file))): diff --git a/dpgen/generator/run.py b/dpgen/generator/run.py index 99fa37f86..8cacb0e87 100644 --- a/dpgen/generator/run.py +++ b/dpgen/generator/run.py @@ -2042,7 +2042,7 @@ def run_md_model_devi(iter_index, jdata, mdata): else: num_digits = np.ceil(np.log10(nbeads + 1)).astype(int) backward_files += [ - f"model_devi{i+1:0{num_digits}d}.out" for i in range(nbeads) + f"model_devi{i + 1:0{num_digits}d}.out" for i in range(nbeads) ] backward_files += [f"log.lammps.{i:d}" for i in range(nbeads)] if model_devi_merge_traj: @@ -2116,11 +2116,7 @@ def run_md_model_devi(iter_index, jdata, mdata): ] elif model_devi_engine == "amber": commands = [ - ( - "TASK=$(basename $(pwd)) && " - "SYS1=${TASK:5:3} && " - "SYS=$((10#$SYS1)) && " - ) + ("TASK=$(basename $(pwd)) && SYS1=${TASK:5:3} && SYS=$((10#$SYS1)) && ") + model_devi_exec + ( " -O -p ../qmmm$SYS.parm7 -c init.rst7 -i ../init$SYS.mdin -o rc.mdout -r rc.rst7 -x rc.nc -inf rc.mdinfo -ref init.rst7" @@ -2263,7 +2259,9 @@ def _read_model_devi_file( assert all( model_devi_content.shape[0] == model_devi_contents[0].shape[0] for model_devi_content in model_devi_contents - ), r"Not all beads generated the same number of lines in the model_devi${ibead}.out file. Check your pimd task carefully." + ), ( + r"Not all beads generated the same number of lines in the model_devi${ibead}.out file. Check your pimd task carefully." 
+ ) last_step = model_devi_contents[0][-1, 0] for ibead in range(1, num_beads): model_devi_contents[ibead][:, 0] = model_devi_contents[ibead][ @@ -2286,7 +2284,7 @@ def _read_model_devi_file( for ibead in range(num_beads): traj_files = glob.glob( os.path.join( - task_path, "traj", f"*lammpstrj{ibead+1:0{num_digits}d}" + task_path, "traj", f"*lammpstrj{ibead + 1:0{num_digits}d}" ) ) traj_files_sorted.append( @@ -2302,7 +2300,9 @@ def _read_model_devi_file( assert all( len(traj_list) == len(traj_files_sorted[0]) for traj_list in traj_files_sorted - ), "Not all beads generated the same number of frames. Check your pimd task carefully." + ), ( + "Not all beads generated the same number of frames. Check your pimd task carefully." + ) for ibead in range(num_beads): for itraj in range(len(traj_files_sorted[0])): base_path, original_filename = os.path.split( @@ -2311,7 +2311,7 @@ def _read_model_devi_file( frame_number = int(original_filename.split(".")[0]) new_filename = os.path.join( base_path, - f"{frame_number + ibead * (int(last_step)+1):d}.lammpstrj", + f"{frame_number + ibead * (int(last_step) + 1):d}.lammpstrj", ) os.rename(traj_files_sorted[ibead][itraj], new_filename) model_devi = np.loadtxt(os.path.join(task_path, "model_devi.out")) @@ -3247,9 +3247,9 @@ def sys_link_fp_vasp_pp(iter_index, jdata): fp_pp_path = os.path.abspath(fp_pp_path) type_map = jdata["type_map"] assert os.path.exists(fp_pp_path) - assert len(fp_pp_files) == len( - type_map - ), "size of fp_pp_files should be the same as the size of type_map" + assert len(fp_pp_files) == len(type_map), ( + "size of fp_pp_files should be the same as the size of type_map" + ) iter_name = make_iter_name(iter_index) work_path = os.path.join(iter_name, fp_name) @@ -3306,9 +3306,9 @@ def _link_fp_abacus_pporb_descript(iter_index, jdata): model_file = os.path.join( fp_pp_path, os.path.split(fp_dpks_model)[1] ) # only the filename - assert os.path.isfile( - model_file - ), f"Can not find the deepks model file {model_file}, which is defined in {ii}/INPUT" + assert os.path.isfile(model_file), ( + f"Can not find the deepks model file {model_file}, which is defined in {ii}/INPUT" + ) os.symlink(model_file, fp_dpks_model) # link to the model file # get pp, orb, descriptor filenames from STRU @@ -3323,9 +3323,9 @@ def _link_fp_abacus_pporb_descript(iter_index, jdata): if orb_files_stru: assert "fp_orb_files" in jdata, "need to define fp_orb_files in jdata" if descriptor_file_stru: - assert ( - "fp_dpks_descriptor" in jdata - ), "need to define fp_dpks_descriptor in jdata" + assert "fp_dpks_descriptor" in jdata, ( + "need to define fp_dpks_descriptor in jdata" + ) for idx, iatom in enumerate(atom_names): type_map_idx = type_map.index(iatom) @@ -3335,21 +3335,21 @@ def _link_fp_abacus_pporb_descript(iter_index, jdata): ) if pp_files_stru: src_file = os.path.join(fp_pp_path, jdata["fp_pp_files"][type_map_idx]) - assert os.path.isfile( - src_file - ), f"Can not find the pseudopotential file {src_file}" + assert os.path.isfile(src_file), ( + f"Can not find the pseudopotential file {src_file}" + ) os.symlink(src_file, pp_files_stru[idx]) if orb_files_stru: src_file = os.path.join(fp_pp_path, jdata["fp_orb_files"][type_map_idx]) - assert os.path.isfile( - src_file - ), f"Can not find the orbital file {src_file}" + assert os.path.isfile(src_file), ( + f"Can not find the orbital file {src_file}" + ) os.symlink(src_file, orb_files_stru[idx]) if descriptor_file_stru: src_file = os.path.join(fp_pp_path, jdata["fp_dpks_descriptor"]) - assert os.path.isfile( - 
src_file - ), f"Can not find the descriptor file {src_file}" + assert os.path.isfile(src_file), ( + f"Can not find the descriptor file {src_file}" + ) os.symlink(src_file, descriptor_file_stru) os.chdir(cwd) diff --git a/tests/data/test_coll_abacus.py b/tests/data/test_coll_abacus.py index 63e2d6e7d..2f8e2a59c 100644 --- a/tests/data/test_coll_abacus.py +++ b/tests/data/test_coll_abacus.py @@ -22,9 +22,9 @@ def setUp(self): with open(abacus_param_file) as fp: jdata = json.load(fp) self.odir = out_dir_name(jdata) - assert os.path.isdir( - "out_data_02_md_abacus" - ), "out data for post fp vasp should exist" + assert os.path.isdir("out_data_02_md_abacus"), ( + "out data for post fp vasp should exist" + ) if os.path.isdir(self.odir): shutil.rmtree(self.odir) shutil.copytree("out_data_02_md_abacus", self.odir) diff --git a/tests/generator/test_make_md.py b/tests/generator/test_make_md.py index 6f5cf177d..8b9f0449b 100644 --- a/tests/generator/test_make_md.py +++ b/tests/generator/test_make_md.py @@ -280,7 +280,7 @@ def test_read_model_devi_file_pimd(self): os.makedirs(os.path.join(path, "traj"), exist_ok=True) for i in range(4): for j in range(0, 5, 2): - with open(os.path.join(path, f"traj/{j}.lammpstrj{i+1}"), "a") as fp: + with open(os.path.join(path, f"traj/{j}.lammpstrj{i + 1}"), "a") as fp: fp.write(f"{i} {j}\n") model_devi_array = np.zeros([3, 7]) model_devi_array[:, 0] = np.array([0, 2, 4]) @@ -291,7 +291,7 @@ def test_read_model_devi_file_pimd(self): model_devi_array[:, 4] = 0.1 * (i + 1) * np.arange(1, 4) model_devi_total_array[i * 3 : (i + 1) * 3, 4] = model_devi_array[:, 4] np.savetxt( - os.path.join(path, f"model_devi{i+1}.out"), + os.path.join(path, f"model_devi{i + 1}.out"), model_devi_array, fmt="%.12e", ) diff --git a/tests/generator/test_post_fp.py b/tests/generator/test_post_fp.py index 72251328d..8446a30e1 100644 --- a/tests/generator/test_post_fp.py +++ b/tests/generator/test_post_fp.py @@ -31,9 +31,9 @@ class TestPostFPVasp(unittest.TestCase): def setUp(self): - assert os.path.isdir( - "out_data_post_fp_vasp" - ), "out data for post fp vasp should exist" + assert os.path.isdir("out_data_post_fp_vasp"), ( + "out data for post fp vasp should exist" + ) if os.path.isdir("iter.000000"): shutil.rmtree("iter.000000") shutil.copytree("out_data_post_fp_vasp", "iter.000000") @@ -183,9 +183,9 @@ def setUp(self): self.e_places = 5 self.f_places = 5 self.v_places = 2 - assert os.path.isdir( - "out_data_post_fp_pwscf" - ), "out data for post fp pwscf should exist" + assert os.path.isdir("out_data_post_fp_pwscf"), ( + "out data for post fp pwscf should exist" + ) if os.path.isdir("iter.000000"): shutil.rmtree("iter.000000") shutil.copytree("out_data_post_fp_pwscf", "iter.000000") @@ -204,9 +204,9 @@ def setUp(self): self.e_places = 5 self.f_places = 5 self.v_places = 2 - assert os.path.isdir( - "out_data_post_fp_abacus" - ), "out data for post fp pwscf should exist" + assert os.path.isdir("out_data_post_fp_abacus"), ( + "out data for post fp pwscf should exist" + ) if os.path.isdir("iter.000000"): shutil.rmtree("iter.000000") shutil.copytree("out_data_post_fp_abacus", "iter.000000") @@ -228,9 +228,9 @@ def setUp(self): self.e_places = 5 self.f_places = 5 self.v_places = 5 - assert os.path.isdir( - "out_data_post_fp_siesta" - ), "out data for post fp siesta should exist" + assert os.path.isdir("out_data_post_fp_siesta"), ( + "out data for post fp siesta should exist" + ) if os.path.isdir("iter.000000"): shutil.rmtree("iter.000000") shutil.copytree("out_data_post_fp_siesta", 
"iter.000000") @@ -249,9 +249,9 @@ def setUp(self): self.e_places = 5 self.f_places = 5 self.v_places = 5 - assert os.path.isdir( - "out_data_post_fp_gaussian" - ), "out data for post fp gaussian should exist" + assert os.path.isdir("out_data_post_fp_gaussian"), ( + "out data for post fp gaussian should exist" + ) if os.path.isdir("iter.000000"): shutil.rmtree("iter.000000") shutil.copytree("out_data_post_fp_gaussian", "iter.000000") @@ -270,9 +270,9 @@ def setUp(self): self.e_places = 5 self.f_places = 5 self.v_places = 5 - assert os.path.isdir( - "out_data_post_fp_cp2k" - ), "out data for post fp cp2k should exist" + assert os.path.isdir("out_data_post_fp_cp2k"), ( + "out data for post fp cp2k should exist" + ) if os.path.isdir("iter.000000"): shutil.rmtree("iter.000000") shutil.copytree("out_data_post_fp_cp2k", "iter.000000") @@ -291,9 +291,9 @@ def setUp(self): self.e_places = 5 self.f_places = 5 self.v_places = 2 - assert os.path.isdir( - "out_data_post_fp_pwmat" - ), "out data for post fp pwmat should exist" + assert os.path.isdir("out_data_post_fp_pwmat"), ( + "out data for post fp pwmat should exist" + ) if os.path.isdir("iter.000000"): shutil.rmtree("iter.000000") shutil.copytree("out_data_post_fp_pwmat", "iter.000000") @@ -339,9 +339,9 @@ def setUp(self): self.e_places = 5 self.f_places = 5 self.v_places = 2 - assert os.path.isdir( - "out_data_post_fp_pwmat" - ), "out data for post fp pwmat should exist" + assert os.path.isdir("out_data_post_fp_pwmat"), ( + "out data for post fp pwmat should exist" + ) if os.path.isdir("iter.000000"): shutil.rmtree("iter.000000") with open(param_custom_fp_file) as fp: