torch_molecule/encoder/pretrained
@@ -69,7 +69,7 @@ class HFPretrainedMolecularEncoder(BaseMolecularEncoder):

     repo_id: "DeepChem/ChemBERTa-5M-MTR" (https://huggingface.co/DeepChem/ChemBERTa-5M-MTR)

-    - UniKi/bert-base-smiles: UniKi's BERT model pretrained on SMILES strings.
+    - UniKi/bert-base-smiles: BERT model pretrained on SMILES strings.
         Output dimension: 768.

     repo_id: "unikei/bert-base-smiles" (https://huggingface.co/unikei/bert-base-smiles)
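For orientation, here is a minimal usage sketch of the encoder whose docstring is being edited in this diff. The repo_id values and the batch_size default are taken from the docstring above; the import path and the commented encode() call are assumptions made for illustration, not confirmed by this diff.

```python
# Hedged sketch: construct the encoder with one of the repo_ids listed in the
# docstring above. Only repo_id and batch_size are documented here; everything
# else (import path, encoding method) is an assumption for illustration.
from torch_molecule import HFPretrainedMolecularEncoder  # assumed import path

encoder = HFPretrainedMolecularEncoder(
    repo_id="unikei/bert-base-smiles",  # documented: BERT pretrained on SMILES, output dim 768
    batch_size=128,                     # documented default
)

# Hypothetical call; the actual encoding method name is not shown in this diff.
# embeddings = encoder.encode(["CCO", "c1ccccc1"])  # e.g. a (2, 768) embedding matrix
```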
@@ -90,7 +90,8 @@ class HFPretrainedMolecularEncoder(BaseMolecularEncoder):
     batch_size : int, default=128
         Batch size used when encoding multiple molecules.
     add_bos_eos : Optional[bool], default=None
-        Whether to add beginning/end of sequence tokens. If None, determined automatically based on model type.
+        Whether to add beginning/end of sequence tokens. If None, the flag is set to True for models in known_add_bos_eos_list and False otherwise.
+        The current known_add_bos_eos_list includes: ["entropy/gpt2_zinc_87m"].
     model_name : str, default="PretrainedMolecularEncoder"
         Name identifier for the model instance.
     verbose : bool, default=False
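The new add_bos_eos wording describes a simple default-resolution rule: an explicit True/False wins, and None falls back to membership in a known list of models. Below is a sketch of that rule; the names known_add_bos_eos_list and resolve_add_bos_eos are illustrative, not necessarily the library's actual identifiers.

```python
# Illustrative sketch of the documented default behavior for add_bos_eos.
# resolve_add_bos_eos and known_add_bos_eos_list are assumed names for illustration.
from typing import Optional

known_add_bos_eos_list = ["entropy/gpt2_zinc_87m"]  # list quoted in the docstring

def resolve_add_bos_eos(repo_id: str, add_bos_eos: Optional[bool]) -> bool:
    """Return the explicit flag if given; otherwise enable BOS/EOS only for known models."""
    if add_bos_eos is not None:
        return add_bos_eos
    return repo_id in known_add_bos_eos_list

# Example: the GPT-2 ZINC model gets BOS/EOS tokens by default, ChemBERTa does not.
assert resolve_add_bos_eos("entropy/gpt2_zinc_87m", None) is True
assert resolve_add_bos_eos("DeepChem/ChemBERTa-5M-MTR", None) is False
```

Written this way, supporting another model that needs BOS/EOS tokens is a one-entry change to the list, while users can still override the behavior explicitly.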