Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
91 commits
Select commit Hold shift + click to select a range
c7e9c0a
Adds angstrom to bohr conversion factor
calvinp0 Feb 9, 2026
c727294
Adds CREST settings and installation for transition state search
calvinp0 Feb 9, 2026
e265cdd
Adds CREST TS search adapter
calvinp0 Feb 9, 2026
432e594
Adds CREST documentation
calvinp0 Feb 9, 2026
5d1612d
Normalizes TSGuess method sources
calvinp0 Feb 9, 2026
8891252
Adds CREST to available TS search methods
calvinp0 Feb 9, 2026
146a145
Adds CREST as TS adapter option
calvinp0 Feb 9, 2026
abd9201
Adds function to reorder XYZ strings.
calvinp0 Feb 9, 2026
87f3c51
Fixes restart tests for parallel execution
calvinp0 Feb 9, 2026
0baa098
Added the job pipe sub-module
alongd Apr 3, 2026
cab5b28
Updated the pipe submission script
alongd Apr 3, 2026
f8d102b
Added pipe_settings
alongd Apr 3, 2026
24ac9db
Added the pipe_worker script
alongd Apr 3, 2026
9e7cab6
Removed the previous pipe implementation
alongd Apr 3, 2026
96c70ce
Implement pipe into Scheduler
alongd Apr 3, 2026
1b2a9cb
Docs: pipe mode
alongd Apr 3, 2026
1b20560
Merge branch 'main' into crest_adapter
calvinp0 Apr 5, 2026
a18828f
Merge branch 'pipe' into crest_adapter
calvinp0 Apr 5, 2026
42b0798
Fixes
calvinp0 Apr 5, 2026
63207c4
Adjustments
calvinp0 Apr 5, 2026
5c7dc77
Update
calvinp0 Apr 5, 2026
c46792c
Thread back to Scheduler after completion of TS pipeline
calvinp0 Apr 6, 2026
9840d33
Race condition fix
calvinp0 Apr 6, 2026
15503d9
Better reporting
calvinp0 Apr 6, 2026
de8e67b
Added troubleshooting of failed indiv. pipe jobs
calvinp0 Apr 6, 2026
6ae8b8c
Further updates
calvinp0 Apr 6, 2026
00b5e2f
Updates
calvinp0 Apr 6, 2026
a9ac64f
Scratch fix
calvinp0 Apr 6, 2026
c26d5b2
Path sep
calvinp0 Apr 6, 2026
7986864
Handle existing pipe directories on fresh start
calvinp0 Apr 7, 2026
b995e05
Handle existing pipe directories on fresh start
calvinp0 Apr 7, 2026
5e67ea4
Add FAILED_ESS task state and document pipe task lifecycle
calvinp0 Apr 7, 2026
3be97bf
Fix premature 'all conformer jobs terminated' when others still running
calvinp0 Apr 7, 2026
c5b659f
Move pipe directories from runs/ into calcs/ with auto-indexing
calvinp0 Apr 7, 2026
a985a3c
Document pipe directory structure in advanced.rst
calvinp0 Apr 7, 2026
b3b54ec
Write task_summary.txt on pipe run completion
calvinp0 Apr 7, 2026
1267ea5
Write task_summary.txt on pipe run completion
calvinp0 Apr 7, 2026
52c558d
Add tests for pipe directory restructure, ESS error classification, a…
calvinp0 Apr 7, 2026
9d86a9a
Add tests for pipe directory restructure, ESS error classification, a…
calvinp0 Apr 7, 2026
86d8f12
Fix premature check_all_done when species has active pipe run
calvinp0 Apr 7, 2026
456bca8
Fix unused variable in pipe_state_test
calvinp0 Apr 7, 2026
41f0454
Fix worker tests: mock _parse_ess_error for mockter adapter
calvinp0 Apr 8, 2026
b948f75
Fix species tests for method_sources in TSGuess
calvinp0 Apr 8, 2026
7a4c61a
Merge remote-tracking branch 'origin/main' into crest_adapter_pipe
calvinp0 Apr 8, 2026
02cddd7
Improve Arkane AEC and BAC validation and logging
calvinp0 Apr 8, 2026
21f1880
Resolve AEC and BAC keys independently for Arkane corrections
calvinp0 Apr 8, 2026
3f21891
Enhance Arkane level of theory identification and AEC validation logging
calvinp0 Apr 8, 2026
591b492
Support independent AEC and BAC keys in the QM corrections script
calvinp0 Apr 8, 2026
7425633
Merge branch 'arkane_m_fix' into crest_adapter_pipe
calvinp0 Apr 8, 2026
254a29f
Improve Arkane AEC and BAC validation and logging
calvinp0 Apr 8, 2026
78fc445
Resolve AEC and BAC keys independently for Arkane corrections
calvinp0 Apr 8, 2026
7261198
Enhance Arkane level of theory identification and AEC validation logging
calvinp0 Apr 8, 2026
938c49b
Support independent AEC and BAC keys in the QM corrections script
calvinp0 Apr 8, 2026
801498b
Merge branch 'arkane_m_fix' into crest_adapter_pipe
calvinp0 Apr 8, 2026
1e71353
Fix stranding species when conformer troubleshooting is exhausted
calvinp0 Apr 9, 2026
94aa7d5
Refine pipe resubmission logic to prevent redundant scheduler jobs
calvinp0 Apr 9, 2026
f3bc145
Handle monoatomic species for DLPNO methods
calvinp0 Apr 10, 2026
dd796ed
Handle monoatomic species for DLPNO methods in Orca
calvinp0 Apr 10, 2026
abeabd2
Handle DLPNO methods for monoatomic species
calvinp0 Apr 10, 2026
a4cf6fb
Fix switch_ts to reset state & clean up IRC when switching TS guesses
calvinp0 Apr 11, 2026
994d6c4
Pass server job IDs to the pipe coordinator during polling
calvinp0 Apr 12, 2026
701f986
Handle task initialization failures in the pipe worker
calvinp0 Apr 12, 2026
58b8fd5
Handle terminated scheduler jobs during pipe reconciliation
calvinp0 Apr 12, 2026
86b8044
Detect terminated scheduler jobs during pipe polling
calvinp0 Apr 12, 2026
d7258d3
Added CANCELLED
calvinp0 Apr 12, 2026
a7e9f63
Merge branch 'main' into crest_adapter_pipe
calvinp0 Apr 12, 2026
5646c5b
Merge branch 'pipe_job_cancel' into crest_adapter_pipe
calvinp0 Apr 12, 2026
2dbae7a
Merge branch 'main' into crest_adapter_pipe
calvinp0 Apr 14, 2026
eb83cc8
Fix pipe jobs immediately failing due to stale server_job_ids snapshot
calvinp0 Apr 14, 2026
b8c6644
Fix switch_ts resetting rotors convergence flag and stale rotors_dict
calvinp0 Apr 15, 2026
9e1b3fa
Wire Level.cabs into ORCA input file generation
calvinp0 Apr 18, 2026
ef121b6
Emit no whitespace when cabs is unset
calvinp0 Apr 18, 2026
885fa2f
Raise before ORCA submission when F12 method lacks a CABS basis
calvinp0 Apr 18, 2026
1a59773
Preserve aux_basis/cabs/solvation when stripping DLPNO for monoatomics
calvinp0 Apr 18, 2026
8c68b90
Auto-retry non-RI F12-CC jobs with /RI on UHF references (ORCA)
calvinp0 Apr 18, 2026
b40f76e
Only strip DLPNO for single-electron monoatomics (H/D/T), not all atoms
calvinp0 Apr 18, 2026
c6b87c4
Stop duplicating scf_convergence / dlpno_threshold keywords
calvinp0 Apr 18, 2026
87ce966
Dedup DFT grid keyword the same way as SCF/DLPNO
calvinp0 Apr 18, 2026
9182a1a
Merge branch 'main' into crest_adapter
calvinp0 Apr 18, 2026
0149310
Fixes
calvinp0 Apr 18, 2026
3e8a7e0
Diagnostics.
calvinp0 Apr 18, 2026
d7a2916
Improve Orca memory troubleshooting for capped total memory
calvinp0 Apr 19, 2026
38a7e0f
Refine memory error detection and ORCA memory troubleshooting logic
calvinp0 Apr 19, 2026
4febdd8
Enable immediate troubleshooting for failed ESS pipe tasks during eje…
calvinp0 Apr 19, 2026
2ac4488
Enhance memory error detection and tracking for capped memory jobs
calvinp0 Apr 19, 2026
3ef6716
.
calvinp0 Apr 20, 2026
2ef5e15
.
calvinp0 Apr 20, 2026
d2d75cb
Skip 2D-graph isomorphism enforcement for TS species
calvinp0 Apr 20, 2026
47e2b1b
mapping/engine: two-pass pairing to reject formula-only matches
calvinp0 Apr 20, 2026
2615bb6
mapping/driver: surface failed pair mappings and retry next product dict
calvinp0 Apr 20, 2026
dc2d4c0
reaction: derive reactive bonds from RMG family when atom_map is absent
calvinp0 Apr 20, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ help:
@echo " install-kinbot Install KinBot"
@echo " install-sella Install Sella"
@echo " install-xtb Install xTB"
@echo " install-crest Install CREST"
@echo " install-torchani Install TorchANI"
@echo " install-ob Install OpenBabel"
@echo ""
Expand Down Expand Up @@ -100,6 +101,9 @@ install-sella:
install-xtb:
bash $(DEVTOOLS_DIR)/install_xtb.sh

install-crest:
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

did we add install-crest into install-all as well?

Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

see comment above

bash $(DEVTOOLS_DIR)/install_crest.sh

install-torchani:
bash $(DEVTOOLS_DIR)/install_torchani.sh

Expand Down
2 changes: 1 addition & 1 deletion arc/constants.pxd
Original file line number Diff line number Diff line change
@@ -1 +1 @@
cdef double pi, Na, kB, R, h, hbar, c, e, m_e, m_p, m_n, amu, a0, bohr_to_angstrom, E_h, F, E_h_kJmol
cdef double pi, Na, kB, R, h, hbar, c, e, m_e, m_p, m_n, amu, a0, E_h, F, E_h_kJmol, bohr_to_angstrom, angstrom_to_bohr
4 changes: 4 additions & 0 deletions arc/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@

#: The Bohr radius :math:`a_0` in :math:`\mathrm{m}`
a0 = 5.2917721092e-11
bohr_to_angstrom = a0 * 1e10

Check warning

Code scanning / CodeQL

Variable defined multiple times Warning

This assignment to 'bohr_to_angstrom' is unnecessary as it is
redefined
before this value is used.

#: The atomic mass unit in :math:`\mathrm{kg}`
amu = 1.660538921e-27
Expand Down Expand Up @@ -79,6 +79,9 @@
#: Vacuum permittivity
epsilon_0 = 8.8541878128

bohr_to_angstrom = 0.529177
angstrom_to_bohr = 1 / bohr_to_angstrom

# Cython does not automatically place module-level variables into the module
# symbol table when in compiled mode, so we must do this manually so that we
# can use the constants from both Cython and regular Python code
Expand All @@ -101,4 +104,5 @@
'F': F,
'epsilon_0': epsilon_0,
'bohr_to_angstrom': bohr_to_angstrom,
'angstrom_to_bohr': angstrom_to_bohr,
})
23 changes: 21 additions & 2 deletions arc/job/adapter.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,7 @@ class JobEnum(str, Enum):
# TS search methods
autotst = 'autotst' # AutoTST, 10.1021/acs.jpca.7b07361, 10.26434/chemrxiv.13277870.v2
heuristics = 'heuristics' # ARC's heuristics
crest = 'crest' # CREST conformer/TS search
kinbot = 'kinbot' # KinBot, 10.1016/j.cpc.2019.106947
gcn = 'gcn' # Graph neural network for isomerization, https://doi.org/10.1021/acs.jpclett.0c00500
user = 'user' # user guesses
Expand Down Expand Up @@ -780,8 +781,7 @@ def _get_additional_job_info(self):
content += '\n'
else:
raise ValueError(f'Unrecognized cluster software: {cluster_soft}')
if content:
self.additional_job_info = content.lower()
self.additional_job_info = content.lower() if content else None

def _check_job_server_status(self) -> str:
"""
Expand All @@ -801,6 +801,10 @@ def _check_job_ess_status(self):
Raises:
IOError: If the output file and any additional server information cannot be found.
"""
existing_keywords = list(self.job_status[1].get('keywords', list()))
# Refresh scheduler-side logs before ESS parsing so server-reported OOMs
# can be detected even when the output file is absent or incomplete.
self._get_additional_job_info()
if self.server != 'local' and self.execution_type != 'incore':
if os.path.exists(self.local_path_to_output_file):
os.remove(self.local_path_to_output_file)
Expand Down Expand Up @@ -839,7 +843,22 @@ def _check_job_ess_status(self):
)
else:
status, keywords, error, line = '', '', '', ''
if self.additional_job_info:
try:
status, keywords, error, line = determine_ess_status(
output_path=self.local_path_to_output_file,
species_label=self.species_label,
job_type=self.job_type,
job_log=self.additional_job_info,
software=self.job_adapter,
)
except FileNotFoundError:
status, keywords, error, line = '', '', '', ''
self.job_status[1]['status'] = status
if 'max_total_job_memory' in existing_keywords and status == 'errored' \
and isinstance(keywords, list) and 'Memory' in keywords \
and 'max_total_job_memory' not in keywords:
keywords.append('max_total_job_memory')
self.job_status[1]['keywords'] = keywords
self.job_status[1]['error'] = error
self.job_status[1]['line'] = line.rstrip()
Expand Down
60 changes: 60 additions & 0 deletions arc/job/adapter_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -180,6 +180,12 @@ def setUpClass(cls):
server='server3',
testing=True,
)
os.makedirs(cls.job_5.local_path, exist_ok=True)
fixture_path = os.path.join(ARC_TESTING_PATH, 'trsh', 'wall_exceeded.txt')
with open(fixture_path, 'r') as f:
log_content = f.read()
with open(os.path.join(cls.job_5.local_path, 'out.txt'), 'w') as f:
f.write(log_content)
cls.job_6 = GaussianAdapter(execution_type='queue',
job_name='opt_101',
job_type='opt',
Expand Down Expand Up @@ -248,6 +254,24 @@ def test_set_cpu_and_mem(self):
self.assertEqual(self.job_4.submit_script_memory, expected_memory)
self.job_4.server = 'local'

def test_set_cpu_and_mem_marks_max_total_job_memory(self):
"""Test tagging jobs whose requested memory is clipped to the node cap."""
job = GaussianAdapter(execution_type='queue',
job_type='opt',
level=Level(method='cbs-qb3'),
project='test',
project_directory=os.path.join(ARC_TESTING_PATH, 'test_JobAdapter'),
species=[ARCSpecies(label='spc1', xyz=['O 0 0 1'])],
server='server2',
job_memory_gb=300,
testing=True,
)

job.set_cpu_and_mem()

self.assertAlmostEqual(job.job_memory_gb, 256 * 0.95)
self.assertIn('max_total_job_memory', job.job_status[1]['keywords'])

def test_set_file_paths(self):
"""Test setting up the job's paths"""
self.assertEqual(self.job_1.local_path, os.path.join(self.job_1.project_directory, 'calcs', 'Species',
Expand Down Expand Up @@ -321,6 +345,42 @@ def test_determine_job_status(self):
self.assertEqual(self.job_5.job_status[1]['status'], 'errored')
self.assertEqual(self.job_5.job_status[1]['keywords'], ['ServerTimeLimit'])

@patch('arc.job.adapter.determine_ess_status')
def test_preserve_max_total_job_memory_keyword(self, mock_determine_ess_status):
"""Test preserving the max_total_job_memory marker across ESS status parsing."""
self.job_4.job_status[1]['keywords'] = ['max_total_job_memory']
self.job_4.initial_time = datetime.datetime.now() - datetime.timedelta(minutes=2)
self.job_4.final_time = datetime.datetime.now() - datetime.timedelta(minutes=1)
os.makedirs(self.job_4.local_path, exist_ok=True)
with open(self.job_4.local_path_to_output_file, 'w') as f:
f.write('dummy output')
mock_determine_ess_status.return_value = (
'errored',
['MDCI', 'Memory'],
'Insufficient job memory.',
'Please increase MaxCore',
)

self.job_4._check_job_ess_status()

self.assertEqual(self.job_4.job_status[1]['status'], 'errored')
self.assertEqual(self.job_4.job_status[1]['keywords'], ['MDCI', 'Memory', 'max_total_job_memory'])

def test_check_job_ess_status_without_output_uses_job_log_memory_error(self):
"""Test detecting server-reported memory errors even when the output file is absent."""
if os.path.isfile(self.job_4.local_path_to_output_file):
os.remove(self.job_4.local_path_to_output_file)
self.job_4.initial_time = datetime.datetime.now() - datetime.timedelta(minutes=2)
self.job_4.final_time = datetime.datetime.now() - datetime.timedelta(minutes=1)
self.job_4.additional_job_info = '\tMEMORY EXCEEDED\n'

with patch.object(self.job_4, '_get_additional_job_info'):
self.job_4._check_job_ess_status()

self.assertEqual(self.job_4.job_status[1]['status'], 'errored')
self.assertEqual(self.job_4.job_status[1]['keywords'], ['Memory'])
self.assertEqual(self.job_4.job_status[1]['error'], 'Insufficient job memory.')

@patch(
"arc.job.trsh.servers",
{
Expand Down
34 changes: 20 additions & 14 deletions arc/job/adapters/orca.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,13 +84,14 @@ def _format_orca_basis(basis: str) -> str:
# job_type_2: reserved for Opt + Freq.
# restricted: 'R' = closed-shell SCF, 'U' = spin unrestricted SCF, 'RO' = open-shell spin restricted SCF
# auxiliary_basis: required for DLPNO calculations (speed up calculation)
# cabs: Complementary Auxiliary Basis Set for F12 calculations (e.g., cc-pVTZ-F12-CABS)
# memory: MB per core (must increase as system gets larger)
# cpus: must be less than number of electron pairs, defaults to min(heavy atoms, cpus limit)
# job_options_blocks: input blocks that enable detailed control over program
# job_options_keywords: input keywords that control the job
# method_class: 'HF' for wavefunction methods (hf, mp, cc, dlpno ...). 'KS' for DFT methods.
# options: additional keywords to control job (e.g., TightSCF, NormalPNO ...)
input_template = """!${restricted}${method_class} ${method} ${basis} ${auxiliary_basis} ${keywords}
input_template = """!${restricted}${method_class} ${method} ${basis} ${auxiliary_basis}${cabs} ${keywords}
!${job_type_1}
${job_type_2}
%%maxcore ${memory}
Expand Down Expand Up @@ -254,6 +255,12 @@ def write_input_file(self) -> None:
"""
Write the input file to execute the job on the server.
"""
if 'f12' in self.level.method and not self.level.cabs:
raise ValueError(
f"Level '{self.level}' uses an F12 method without a CABS basis. "
f"Set `cabs:` in the level spec (e.g. cc-pVTZ-F12-CABS). "
f"Without it ORCA runs with DimCABS = 0 and returns non-F12 energies."
)
input_dict = dict()
for key in ['block',
'scan',
Expand All @@ -264,6 +271,7 @@ def write_input_file(self) -> None:
input_dict[key] = ''
input_dict['auxiliary_basis'] = _format_orca_basis(self.level.auxiliary_basis or '')
input_dict['basis'] = _format_orca_basis(self.level.basis or '')
input_dict['cabs'] = f' {_format_orca_basis(self.level.cabs)}' if self.level.cabs else ''
input_dict['charge'] = self.charge
input_dict['cpus'] = self.cpu_cores
input_dict['label'] = self.species_label
Expand All @@ -272,30 +280,28 @@ def write_input_file(self) -> None:
input_dict['multiplicity'] = self.multiplicity
input_dict['xyz'] = xyz_to_str(self.xyz)

scf_convergence = self.args['keyword'].get('scf_convergence', '').lower() or \
orca_default_options_dict['global']['keyword'].get('scf_convergence', '').lower()
if not scf_convergence:
self.args['keyword'].setdefault(
'scf_convergence',
orca_default_options_dict['global']['keyword'].get('scf_convergence', '').lower())
if not self.args['keyword']['scf_convergence']:
raise ValueError('Orca SCF convergence is not specified. Please specify this variable either in '
'settings.py as default or in the input file as additional options.')
self.add_to_args(val=scf_convergence, key1='keyword')

# Orca requires different blocks for wavefunction methods and DFT methods
if self.level.method_type == 'dft':
input_dict['method_class'] = 'KS'
# DFT grid must be the same for both opt and freq
if self.fine:
self.add_to_args(val='defgrid3', key1='keyword')
else:
self.add_to_args(val='defgrid2', key1='keyword')
# DFT grid must be the same for both opt and freq.
# Users can override by setting `dft_grid` in args.keyword (e.g. dft_grid: DEFGRID1).
self.args['keyword'].setdefault('dft_grid', 'defgrid3' if self.fine else 'defgrid2')
elif self.level.method_type == 'wavefunction':
input_dict['method_class'] = 'HF'
if 'dlpno' in self.level.method:
dlpno_threshold = self.args['keyword'].get('dlpno_threshold', '').lower() or \
orca_default_options_dict['global']['keyword'].get('dlpno_threshold', '').lower()
if not dlpno_threshold:
self.args['keyword'].setdefault(
'dlpno_threshold',
orca_default_options_dict['global']['keyword'].get('dlpno_threshold', '').lower())
if not self.args['keyword']['dlpno_threshold']:
raise ValueError('Orca DLPNO threshold is not specified. Please specify this variable either in '
'settings.py as default or in the input file as additional options.')
self.add_to_args(val=dlpno_threshold, key1='keyword')
else:
logger.debug(f'Running {self.level.method_type} {self.level.method} method in Orca.')

Expand Down
28 changes: 28 additions & 0 deletions arc/job/adapters/orca_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,34 @@ def test_set_input_file_memory(self):
expected_memory = math.ceil(14 * 1024 / 8)
self.assertEqual(self.job_1.input_file_memory, expected_memory)

def test_set_input_file_memory_with_configured_core_count(self):
"""Test ORCA %%maxcore calculation for a configured total memory and cpu count."""
original_memory = self.job_1.job_memory_gb
original_cpu_cores = self.job_1.cpu_cores
self.job_1.job_memory_gb = 250
self.job_1.cpu_cores = 22
self.job_1.set_input_file_memory()
self.assertEqual(self.job_1.input_file_memory, math.ceil(250 * 1024 / 22))
self.job_1.job_memory_gb = original_memory
self.job_1.cpu_cores = original_cpu_cores
self.job_1.set_input_file_memory()

def test_write_input_file_with_configured_core_count(self):
"""Test rendering ORCA input for a configured total memory and cpu count."""
original_memory = self.job_1.job_memory_gb
original_cpu_cores = self.job_1.cpu_cores
self.job_1.job_memory_gb = 250
self.job_1.cpu_cores = 22
self.job_1.set_input_file_memory()
self.job_1.write_input_file()
with open(os.path.join(self.job_1.local_path, input_filenames[self.job_1.job_adapter]), 'r') as f:
content = f.read()
self.assertIn('%maxcore 11637', content)
self.assertIn('%pal nprocs 22 end', content)
self.job_1.job_memory_gb = original_memory
self.job_1.cpu_cores = original_cpu_cores
self.job_1.set_input_file_memory()

def test_write_input_file(self):
"""Test writing Orca input files"""
self.job_1.write_input_file()
Expand Down
2 changes: 2 additions & 0 deletions arc/job/adapters/ts/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import arc.job.adapters.ts.autotst_ts
import arc.job.adapters.ts.crest
import arc.job.adapters.ts.gcn_ts
import arc.job.adapters.ts.heuristics
import arc.job.adapters.ts.kinbot_ts
import arc.job.adapters.ts.seed_hub
import arc.job.adapters.ts.xtb_gsm
import arc.job.adapters.ts.orca_neb
Loading
Loading