Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions deepmd/pt_expt/utils/serialization.py
Original file line number Diff line number Diff line change
Expand Up @@ -195,6 +195,7 @@ def _collect_metadata(model: torch.nn.Module) -> dict:
"mixed_types": model.mixed_types(),
"sel_type": model.get_sel_type(),
"has_default_fparam": model.has_default_fparam(),
"default_fparam": model.get_default_fparam(),
Comment thread
wanghan-iapcm marked this conversation as resolved.
"fitting_output_defs": fitting_output_defs,
}

Expand Down
1 change: 1 addition & 0 deletions source/api_cc/include/DeepPotPTExpt.h
Original file line number Diff line number Diff line change
Expand Up @@ -202,6 +202,7 @@ class DeepPotPTExpt : public DeepPotBackend {
int daparam;
bool aparam_nall;
bool has_default_fparam_;
std::vector<double> default_fparam_;
double rcut;
int gpu_id;
bool gpu_enabled;
Expand Down
41 changes: 41 additions & 0 deletions source/api_cc/src/DeepPotPTExpt.cc
Original file line number Diff line number Diff line change
Expand Up @@ -636,6 +636,25 @@ void DeepPotPTExpt::init(const std::string& model,
} else {
has_default_fparam_ = false;
}
if (has_default_fparam_) {
if (metadata.obj_val.count("default_fparam")) {
default_fparam_.clear();
for (const auto& v : metadata["default_fparam"].as_array()) {
default_fparam_.push_back(v.as_double());
}
if (static_cast<int>(default_fparam_.size()) != dfparam) {
throw deepmd::deepmd_exception(
"default_fparam length (" + std::to_string(default_fparam_.size()) +
") does not match dim_fparam (" + std::to_string(dfparam) + ").");
}
} else {
std::cerr << "WARNING: Model has has_default_fparam=true but "
"default_fparam values are missing from metadata. "
"Empty fparam will not be substituted. Please regenerate "
"the .pt2 model with an updated version of deepmd-kit."
<< std::endl;
}
}
Comment thread
wanghan-iapcm marked this conversation as resolved.

type_map.clear();
for (const auto& v : metadata["type_map"].as_array()) {
Expand Down Expand Up @@ -818,6 +837,17 @@ void DeepPotPTExpt::compute(ENERGYVTYPE& ener,
valuetype_options)
.to(torch::kFloat64)
.to(device);
} else if (has_default_fparam_ && !default_fparam_.empty()) {
fparam_tensor =
torch::from_blob(const_cast<double*>(default_fparam_.data()),
{1, static_cast<std::int64_t>(default_fparam_.size())},
options)
.clone()
.to(device);
} else if (has_default_fparam_) {
throw deepmd::deepmd_exception(
"fparam is empty and default_fparam values are missing from the .pt2 "
"metadata. Please regenerate the model or provide fparam explicitly.");
} else {
fparam_tensor = torch::zeros({0}, options).to(device);
}
Expand Down Expand Up @@ -1052,6 +1082,17 @@ void DeepPotPTExpt::compute(ENERGYVTYPE& ener,
valuetype_options)
.to(torch::kFloat64)
.to(device);
} else if (has_default_fparam_ && !default_fparam_.empty()) {
fparam_tensor =
torch::from_blob(const_cast<double*>(default_fparam_.data()),
{1, static_cast<std::int64_t>(default_fparam_.size())},
options)
.clone()
.to(device);
} else if (has_default_fparam_) {
throw deepmd::deepmd_exception(
"fparam is empty and default_fparam values are missing from the .pt2 "
"metadata. Please regenerate the model or provide fparam explicitly.");
} else {
fparam_tensor = torch::zeros({0}, options).to(device);
}
Expand Down
25 changes: 0 additions & 25 deletions source/api_cc/tests/test_deeppot_a_fparam_aparam_ptexpt.cc
Original file line number Diff line number Diff line change
Expand Up @@ -345,28 +345,3 @@ TYPED_TEST(TestInferDeepPotNoDefaultFParamPtExpt, no_default_fparam) {
EXPECT_EQ(dp.dim_fparam(), 1);
EXPECT_FALSE(dp.has_default_fparam());
}

// Test that a .pt2 model with default_fparam reports true
// NOTE(review): this fixture only checks the metadata flags; full numeric
// inference with the default fparam is covered elsewhere.
template <class VALUETYPE>
class TestInferDeepPotDefaultFParamPtExpt : public ::testing::Test {
 protected:
  // Model handle under test; initialized in SetUp from the bundled .pt2 file.
  deepmd::DeepPot dp;

  void SetUp() override {
#ifndef BUILD_PYTORCH
    // The .pt2 backend requires PyTorch; skip the whole fixture otherwise.
    GTEST_SKIP() << "Skip because PyTorch support is not enabled.";
#endif
    // Model exported with has_default_fparam=true.
    dp.init("../../tests/infer/fparam_aparam_default.pt2");
  };

  void TearDown() override {};
};

TYPED_TEST_SUITE(TestInferDeepPotDefaultFParamPtExpt, ValueTypes);

// The model declares one fparam dimension and advertises a stored default.
TYPED_TEST(TestInferDeepPotDefaultFParamPtExpt, has_default_fparam) {
  using VALUETYPE = TypeParam;
  deepmd::DeepPot& dp = this->dp;
  EXPECT_EQ(dp.dim_fparam(), 1);
  EXPECT_TRUE(dp.has_default_fparam());
}
220 changes: 220 additions & 0 deletions source/api_cc/tests/test_deeppot_default_fparam_ptexpt.cc
Original file line number Diff line number Diff line change
@@ -0,0 +1,220 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// Test C++ inference for pt_expt (.pt2) backend with default_fparam.
// Uses fparam_aparam_default.pt2: has_default_fparam=true,
// default=[0.25852028].
#include <gtest/gtest.h>

#include <algorithm>
#include <cmath>
#include <fstream>
#include <vector>

#include "DeepPot.h"
#include "neighbor_list.h"
#include "test_utils.h"

// NOTE: a tolerance of 1e-10 does not pass; it is unclear whether this
// indicates a genuine bug or merely accumulated floating-point error.
#undef EPSILON
#define EPSILON (std::is_same<VALUETYPE, double>::value ? 1e-7 : 1e-4)

// Fixture for inference tests against fparam_aparam_default.pt2, a model
// exported with has_default_fparam=true and default fparam [0.25852028].
// The expected_* arrays below are reference values generated by
// source/tests/infer/gen_fparam_aparam.py with fparam = 0.25852028, so the
// same references validate both the empty-fparam (default substituted) and
// the explicit-fparam code paths.
template <class VALUETYPE>
class TestInferDeepPotDefaultFParamPtExpt : public ::testing::Test {
 protected:
  // Test system: 6 atoms of a single type in a 13 x 13 x 13 box.
  std::vector<VALUETYPE> coord = {12.83, 2.56, 2.18, 12.09, 2.87, 2.74,
                                  00.25, 3.32, 1.68, 3.36, 3.00, 1.81,
                                  3.51, 2.51, 2.60, 4.27, 3.22, 1.56};
  std::vector<int> atype = {0, 0, 0, 0, 0, 0};
  std::vector<VALUETYPE> box = {13., 0., 0., 0., 13., 0., 0., 0., 13.};
  // aparam is still provided explicitly (only fparam has a stored default).
  std::vector<VALUETYPE> aparam = {0.25852028, 0.25852028, 0.25852028,
                                   0.25852028, 0.25852028, 0.25852028};
  // Explicit fparam, equal to the model default, for the backward-compat test.
  std::vector<VALUETYPE> fparam = {0.25852028};
  // Reference per-atom energies computed with the default fparam.
  std::vector<VALUETYPE> expected_e = {
      1.596836265688982293e-01, 1.596933624175455035e-01,
      1.596859462832928844e-01, 1.596779837732069107e-01,
      1.596776702807257142e-01, 1.596869048501883825e-01};
  // Reference forces (natoms * 3, row-major per atom).
  std::vector<VALUETYPE> expected_f = {
      1.112134318320098417e-05, 1.085230789880272913e-04,
      9.298442641358670786e-07, 1.491517597320257801e-04,
      -1.250419527572718750e-05, -9.768265174690742383e-05,
      -5.021052645725076108e-05, -9.741678916887853762e-05,
      9.375317764392499637e-05, 8.664103999852429459e-05,
      -4.538513400016661465e-05, 8.561605116672728300e-05,
      -2.454811055475983474e-05, 1.079491454988312375e-04,
      -1.656974003674590982e-04, -1.721555059017404292e-04,
      -6.116610604208619416e-05, 8.308097903957838235e-05};
  // Reference per-atom virials (natoms * 9).
  std::vector<VALUETYPE> expected_v = {
      -1.264062189119718516e-04, -1.636544077308298682e-05,
      4.453224130911559301e-05, -7.947403699518174416e-06,
      -4.603504987332694676e-05, 9.491045850088937973e-06,
      4.131028921467351392e-05, 9.691472468201808941e-06,
      -3.323572704427467454e-05, -1.024556912293136224e-04,
      5.530809120954762511e-06, 5.211030391191995666e-05,
      -3.851138686809712045e-06, 2.101414374152978974e-07,
      3.247573516972806439e-06, 4.561253716254927361e-05,
      -3.865680092083368681e-06, -3.262252150841838630e-05,
      -1.166788692566848309e-04, -1.814499890570951256e-05,
      2.155064011880963138e-05, -1.629918981392338952e-05,
      -3.245631268444034052e-05, 2.968538417601219450e-05,
      2.463149007223181425e-05, 3.660689861518750502e-05,
      -3.586518711234942813e-05, -1.424206401855391917e-04,
      -1.017840928263479808e-05, 1.421307534994556974e-05,
      -8.618294024757196269e-06, -2.192409332705388475e-05,
      3.461715847634955364e-05, 1.277625693457723244e-05,
      3.486479415793142304e-05, -5.604161168847289151e-05,
      -8.612844407008964597e-05, 2.508361660152530129e-06,
      -1.633895954533155651e-07, 1.903591783622965016e-06,
      -3.028341203071198989e-05, 4.685511271783774690e-05,
      2.876824509984395433e-06, 4.576515617130287920e-05,
      -7.108738780331674085e-05, -1.062354815105980244e-04,
      -2.954644717832236758e-05, 4.075640001084843372e-05,
      -3.138369091725702186e-05, -1.316088004849702041e-05,
      1.786389692843502177e-05, 4.579187321116953935e-05,
      1.869753034515599593e-05, -2.550749273395904029e-05};
  int natoms;
  double expected_tot_e;
  std::vector<VALUETYPE> expected_tot_v;

  deepmd::DeepPot dp;

  void SetUp() override {
#ifndef BUILD_PYTORCH
    GTEST_SKIP() << "Skip because PyTorch support is not enabled.";
#endif
    dp.init("../../tests/infer/fparam_aparam_default.pt2");

    // Derive totals from the per-atom references so the checks below stay
    // consistent with the arrays above by construction.
    natoms = expected_e.size();
    EXPECT_EQ(natoms * 3, expected_f.size());
    EXPECT_EQ(natoms * 9, expected_v.size());
    expected_tot_e = 0.;
    expected_tot_v.resize(9);
    std::fill(expected_tot_v.begin(), expected_tot_v.end(), 0.);
    for (int ii = 0; ii < natoms; ++ii) {
      expected_tot_e += expected_e[ii];
    }
    for (int ii = 0; ii < natoms; ++ii) {
      for (int dd = 0; dd < 9; ++dd) {
        expected_tot_v[dd] += expected_v[ii * 9 + dd];
      }
    }
  }

  void TearDown() override {}
};

TYPED_TEST_SUITE(TestInferDeepPotDefaultFParamPtExpt, ValueTypes);

// Sanity checks on the model attributes exposed through the C++ API:
// one fparam dimension, one aparam dimension, and a stored default fparam.
TYPED_TEST(TestInferDeepPotDefaultFParamPtExpt, attrs) {
  deepmd::DeepPot& dp = this->dp;
  EXPECT_EQ(dp.dim_fparam(), 1);
  EXPECT_EQ(dp.dim_aparam(), 1);
  EXPECT_TRUE(dp.has_default_fparam());
}

// With an empty fparam vector the backend must substitute the default fparam
// stored in the .pt2 metadata; energies, forces, and virials must match the
// references generated with fparam = 0.25852028.
TYPED_TEST(TestInferDeepPotDefaultFParamPtExpt, cpu_build_nlist_empty_fparam) {
  using VALUETYPE = TypeParam;
  std::vector<VALUETYPE>& coord = this->coord;
  std::vector<int>& atype = this->atype;
  std::vector<VALUETYPE>& box = this->box;
  std::vector<VALUETYPE>& aparam = this->aparam;
  std::vector<VALUETYPE>& expected_f = this->expected_f;
  int& natoms = this->natoms;
  double& expected_tot_e = this->expected_tot_e;
  std::vector<VALUETYPE>& expected_tot_v = this->expected_tot_v;
  deepmd::DeepPot& dp = this->dp;
  double ener;
  std::vector<VALUETYPE> force, virial;
  // Empty fparam — model should use default
  std::vector<VALUETYPE> empty_fparam;
  dp.compute(ener, force, virial, coord, atype, box, empty_fparam, aparam);

  EXPECT_EQ(force.size(), natoms * 3);
  EXPECT_EQ(virial.size(), 9);

  EXPECT_LT(fabs(ener - expected_tot_e), EPSILON);
  for (int ii = 0; ii < natoms * 3; ++ii) {
    EXPECT_LT(fabs(force[ii] - expected_f[ii]), EPSILON);
  }
  for (int ii = 0; ii < 3 * 3; ++ii) {
    EXPECT_LT(fabs(virial[ii] - expected_tot_v[ii]), EPSILON);
  }
}

// Backward compatibility: passing the fparam explicitly (equal to the model
// default) must produce the same results as the empty-fparam path.
TYPED_TEST(TestInferDeepPotDefaultFParamPtExpt,
           cpu_build_nlist_explicit_fparam) {
  using VALUETYPE = TypeParam;
  std::vector<VALUETYPE>& coord = this->coord;
  std::vector<int>& atype = this->atype;
  std::vector<VALUETYPE>& box = this->box;
  std::vector<VALUETYPE>& fparam = this->fparam;
  std::vector<VALUETYPE>& aparam = this->aparam;
  std::vector<VALUETYPE>& expected_f = this->expected_f;
  int& natoms = this->natoms;
  double& expected_tot_e = this->expected_tot_e;
  std::vector<VALUETYPE>& expected_tot_v = this->expected_tot_v;
  deepmd::DeepPot& dp = this->dp;
  double ener;
  std::vector<VALUETYPE> force, virial;
  // Explicit fparam — backward compat
  dp.compute(ener, force, virial, coord, atype, box, fparam, aparam);

  EXPECT_EQ(force.size(), natoms * 3);
  EXPECT_EQ(virial.size(), 9);

  EXPECT_LT(fabs(ener - expected_tot_e), EPSILON);
  for (int ii = 0; ii < natoms * 3; ++ii) {
    EXPECT_LT(fabs(force[ii] - expected_f[ii]), EPSILON);
  }
  for (int ii = 0; ii < 3 * 3; ++ii) {
    EXPECT_LT(fabs(virial[ii] - expected_tot_v[ii]), EPSILON);
  }
}

// Same empty-fparam check, but driving the LAMMPS-style neighbor-list
// overload of compute(): the ghost-atom copy is built locally, computed on,
// and the resulting forces are folded back onto local atoms for comparison.
TYPED_TEST(TestInferDeepPotDefaultFParamPtExpt, cpu_lmp_nlist_empty_fparam) {
  using VALUETYPE = TypeParam;
  std::vector<VALUETYPE>& coord = this->coord;
  std::vector<int>& atype = this->atype;
  std::vector<VALUETYPE>& box = this->box;
  std::vector<VALUETYPE>& aparam = this->aparam;
  std::vector<VALUETYPE>& expected_f = this->expected_f;
  int& natoms = this->natoms;
  double& expected_tot_e = this->expected_tot_e;
  std::vector<VALUETYPE>& expected_tot_v = this->expected_tot_v;
  deepmd::DeepPot& dp = this->dp;

  // Build the extended (local + ghost) system and its neighbor list.
  float rc = dp.cutoff();
  int nloc = coord.size() / 3;
  std::vector<VALUETYPE> coord_ext;
  std::vector<int> atype_ext, mapping;
  std::vector<std::vector<int> > neighbor_data;
  _build_nlist<VALUETYPE>(neighbor_data, coord_ext, atype_ext, mapping, coord,
                          atype, box, rc);
  int nall = coord_ext.size() / 3;
  std::vector<int> ilist(nloc), numneigh(nloc);
  std::vector<int*> firstneigh(nloc);
  deepmd::InputNlist input_nlist(nloc, ilist.data(), numneigh.data(),
                                 firstneigh.data());
  convert_nlist(input_nlist, neighbor_data);

  // Empty fparam: the backend substitutes the stored default.
  double ener;
  std::vector<VALUETYPE> force_ext, virial;
  std::vector<VALUETYPE> empty_fparam;
  dp.compute(ener, force_ext, virial, coord_ext, atype_ext, box, nall - nloc,
             input_nlist, 0, empty_fparam, aparam);
  // Fold ghost-atom forces back onto their owning local atoms.
  std::vector<VALUETYPE> force;
  _fold_back<VALUETYPE>(force, force_ext, mapping, nloc, nall, 3);

  EXPECT_EQ(force.size(), natoms * 3);
  EXPECT_EQ(virial.size(), 9);

  EXPECT_LT(fabs(ener - expected_tot_e), EPSILON);
  for (int ii = 0; ii < natoms * 3; ++ii) {
    EXPECT_LT(fabs(force[ii] - expected_f[ii]), EPSILON);
  }
  for (int ii = 0; ii < 3 * 3; ++ii) {
    EXPECT_LT(fabs(virial[ii] - expected_tot_v[ii]), EPSILON);
  }
}
41 changes: 41 additions & 0 deletions source/tests/infer/gen_fparam_aparam.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,10 @@ def main():
print(f"Exporting to {pt2_default_path} ...") # noqa: T201
pt_expt_deserialize_to_file(pt2_default_path, copy.deepcopy(data_default))

# NOTE: fparam_aparam_default.pth is committed to git (generated by PR #5311),
# NOT regenerated here — it has its own expected values in
# test_deeppot_default_fparam_pt.cc.

# ---- 4. Run inference via DeepPot to get reference values ----
from deepmd.infer import (
DeepPot,
Expand Down Expand Up @@ -193,6 +197,43 @@ def main():
print(f" {vv:.18e}{comma}") # noqa: T201
print(" };") # noqa: T201

# ---- 4b. Reference values for default_fparam model (.pt2) ----
dp_default = DeepPot(pt2_default_path)
e_d, f_d, _v_d, ae_d, av_d = dp_default.eval(
coord,
box,
atype,
fparam=fparam_val,
aparam=aparam_val,
atomic=True,
)
atom_energy_d = ae_d[0, :, 0]
force_d = f_d[0]
atom_virial_d = av_d[0]

print("\n// ---- Reference values for C++ default_fparam .pt2 test ----") # noqa: T201
print(f"// Total energy: {e_d[0, 0]:.18e}") # noqa: T201
print() # noqa: T201
print(" std::vector<VALUETYPE> expected_e = {") # noqa: T201
for ii, ev in enumerate(atom_energy_d):
comma = "," if ii < len(atom_energy_d) - 1 else ""
print(f" {ev:.18e}{comma}") # noqa: T201
print(" };") # noqa: T201

print(" std::vector<VALUETYPE> expected_f = {") # noqa: T201
force_flat_d = force_d.flatten()
for ii, fv in enumerate(force_flat_d):
comma = "," if ii < len(force_flat_d) - 1 else ""
print(f" {fv:.18e}{comma}") # noqa: T201
print(" };") # noqa: T201

print(" std::vector<VALUETYPE> expected_v = {") # noqa: T201
virial_flat_d = atom_virial_d.flatten()
for ii, vv in enumerate(virial_flat_d):
comma = "," if ii < len(virial_flat_d) - 1 else ""
print(f" {vv:.18e}{comma}") # noqa: T201
print(" };") # noqa: T201

# ---- 5. Verify .pth gives same results ----
if pth_exported:
dp_pth = DeepPot(pth_path)
Expand Down
Loading