Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions ncar_jobqueue/cluster.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ def _get_base_class():
'Using a local cluster via `distributed.LocalCluster`.'
)

if is_notebook and running_from_jupyterhub and host in {'cheyenne', 'casper-dav'}:
if is_notebook and running_from_jupyterhub and host in {'cheyenne', 'casper'}:
dashboard_link = 'https://jupyterhub.hpc.ucar.edu/stable/user/{USER}/proxy/{port}/status'
if jupyterhub_server_name:
dashboard_link = (
Expand All @@ -44,7 +44,7 @@ class NCARCluster:
-------
cluster : cluster object

- `dask_jobqueue.PBSCluster`, if the host on Cheyenne, Casper (DAV), CGD's Hobart and Izumi clusters.
- `dask_jobqueue.PBSCluster`, if the host is on Derecho, Casper (DAV), CGD's Hobart and Izumi clusters.
- `distributed.LocalCluster` otherwise.
"""

Expand Down
17 changes: 0 additions & 17 deletions ncar_jobqueue/ncar-jobqueue.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -15,23 +15,6 @@ derecho:
env-extra: []
death-timeout: 60

cheyenne:
pbs:
# project: XXXXXXXX
name: dask-worker-cheyenne
cores: 36 # Total number of cores per job
memory: '109GiB' # Total amount of memory per job
processes: 9 # Number of Python processes per job
interface: ext # Network interface to use like eth0 or ib0
queue: regular
walltime: '01:00:00'
resource-spec: select=1:ncpus=36:mem=109GB
log-directory: '/glade/cheyenne/scratch/${USER}/dask/logs'
local-directory: '/glade/cheyenne/scratch/${USER}/dask/local-dir'
job-extra: []
env-extra: []
death-timeout: 60

casper:
pbs:
# project: XXXXXXXX
Expand Down
15 changes: 5 additions & 10 deletions ncar_jobqueue/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,27 +10,22 @@
CLUSTERS = {
'hobart': Cluster('pbs', dask_jobqueue.PBSCluster),
'izumi': Cluster('pbs', dask_jobqueue.PBSCluster),
'cheyenne': Cluster('pbs', dask_jobqueue.PBSCluster),
'casper-dav': Cluster('pbs', dask_jobqueue.PBSCluster),
'casper': Cluster('pbs', dask_jobqueue.PBSCluster),
'unknown': Cluster('pbs', distributed.LocalCluster),
'derecho': Cluster('pbs', dask_jobqueue.PBSCluster),
}
cheyenne_login = re.compile(r'cheyenne([0-9]+).cheyenne.ucar.edu')
cheyenne_compute = re.compile(r'r([a-zA-Z0-9]+).ib0.cheyenne.ucar.edu')

derecho_compute = re.compile(r'dec[0-9]+\.hsn\.de\.hpc\.ucar\.edu')
derecho_login = re.compile(r'derecho[0-9]+\.hsn\.de\.hpc\.ucar\.edu')

dav_login = re.compile(r'casper')
dav_compute = re.compile(r'crhtc([a-zA-Z0-9]+).hpc.ucar.edu')
casper_login = re.compile(r'casper')
casper_compute = re.compile(r'crhtc([a-zA-Z0-9]+).hpc.ucar.edu')
hobart = re.compile(r'h([a-zA-Z0-9]+).cgd.ucar.edu')
izumi = re.compile(r'i([a-zA-Z0-9]+).unified.ucar.edu')

regexes = [
('cheyenne', cheyenne_login),
('cheyenne', cheyenne_compute),
('casper-dav', dav_login),
('casper-dav', dav_compute),
('casper', casper_login),
('casper', casper_compute),
('izumi', izumi),
('hobart', hobart),
('derecho', derecho_compute),
Expand Down
Loading