Looking for advice on "ValueError: ctypes objects containing pointers cannot be pickled"

Calling a scipy.special.cython_special function from another module seems to let me execute it on a cluster. I think serialising the Python module in which the C function is defined is not possible/allowed.
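The limitation seems reproducible without Dask or numba at all: pickling the ctypes function pointer on its own already fails with the same message (a minimal sketch, using the same wrapper as in cython_special.py below).

import ctypes
import pickle
from numba.extending import get_cython_function_address

# same ctypes wrapper as in cython_special.py below
addr = get_cython_function_address('scipy.special.cython_special', 'huber')
chuber = ctypes.CFUNCTYPE(ctypes.c_double, ctypes.c_double, ctypes.c_double)(addr)

pickle.dumps(chuber)  # ValueError: ctypes objects containing pointers cannot be pickled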

cython_special.py
import ctypes
import scipy
from numba import njit
from numba.extending import get_cython_function_address

# look up the address of scipy's cython-compiled huber(delta, r)
# and wrap it as a ctypes function pointer double(double, double)
addr = get_cython_function_address('scipy.special.cython_special', 'huber')
functype = ctypes.CFUNCTYPE(ctypes.c_double, ctypes.c_double, ctypes.c_double)
chuber = functype(addr)

# njit wrapper defined in the same module as the ctypes pointer
@njit(cache=False)
def nchuber(delta, r):
    return chuber(delta, r)

module.py
from numba import njit
import cython_special as cs

# njit wrapper that lives in a different module than the ctypes pointer
@njit(cache=False)
def call_nchuber(delta, r):
    return cs.nchuber(delta, r)
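Locally (without Dask) both wrappers work and agree with the plain scipy ufunc, so the setup itself seems fine (quick sanity check, nothing Dask-specific assumed here):

from scipy.special import huber
import cython_special as cs
import module as m

# both numba wrappers should reproduce scipy's own huber loss (3.5 for delta=1.0, r=4.0)
print(cs.nchuber(1.0, 4.0), m.call_nchuber(1.0, 4.0), huber(1.0, 4.0))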

from distributed import Client, LocalCluster
cluster = LocalCluster()
client = Client(cluster)
import module as m
import cython_special as cs
Calling it from the other module works:
submitted = client.submit(m.call_nchuber, 1.0, 4.0)
submitted.result()
[Out]: 3.5
Submitting the function from the module where the C function is defined does not work:
submitted = client.submit(cs.nchuber, 1.0, 4.0)
submitted.result()
[Out]: 2024-02-28 19:02:43,072 - distributed.protocol.pickle - INFO - Failed to serialize CPUDispatcher(<function nchuber at 0x7f50bd4fe440>). Exception: ctypes objects containing pointers cannot be pickled
ValueError: ctypes objects containing pointers cannot be pickled
Full traceback:
---------------------------------------------------------------------------
KeyError                                  Traceback (most recent call last)
File /srv/conda/envs/notebook/lib/python3.10/site-packages/distributed/worker.py:2919, in dumps_function(func)
   2918     with _cache_lock:
-> 2919         result = cache_dumps[func]
   2920 except KeyError:

File /srv/conda/envs/notebook/lib/python3.10/site-packages/distributed/collections.py:24, in LRU.__getitem__(self, key)
     23 def __getitem__(self, key):
---> 24     value = super().__getitem__(key)
     25     cast(OrderedDict, self.data).move_to_end(key)

File /srv/conda/envs/notebook/lib/python3.10/collections/__init__.py:1106, in UserDict.__getitem__(self, key)
   1105     return self.__class__.__missing__(self, key)
-> 1106 raise KeyError(key)

KeyError: CPUDispatcher(<function nchuber at 0x7faff5313130>)

During handling of the above exception, another exception occurred:

ValueError                                Traceback (most recent call last)
File /srv/conda/envs/notebook/lib/python3.10/site-packages/distributed/protocol/pickle.py:46, in dumps(x, buffer_callback, protocol)
     45 buffers.clear()
---> 46 result = pickle.dumps(x, **dump_kwargs)
     47 if b"__main__" in result or (
     48     CLOUDPICKLE_GTE_20
     49     and getattr(inspect.getmodule(x), "__name__", None)
     50     in cloudpickle.list_registry_pickle_by_value()
     51 ):

File /srv/conda/envs/notebook/lib/python3.10/site-packages/numba/core/serialize.py:106, in _pickle__CustomPickled(cp)
    102 """standard pickling for `_CustomPickled`.
    103 
    104 Uses `NumbaPickler` to dump.
    105 """
--> 106 serialized = dumps((cp.ctor, cp.states))
    107 return _unpickle__CustomPickled, (serialized,)

File /srv/conda/envs/notebook/lib/python3.10/site-packages/numba/core/serialize.py:57, in dumps(obj)
     56 p = pickler(buf, protocol=4)
---> 57 p.dump(obj)
     58 pickled = buf.getvalue()

File /srv/conda/envs/notebook/lib/python3.10/site-packages/numba/cloudpickle/cloudpickle_fast.py:568, in CloudPickler.dump(self, obj)
    567 try:
--> 568     return Pickler.dump(self, obj)
    569 except RuntimeError as e:

ValueError: ctypes objects containing pointers cannot be pickled

During handling of the above exception, another exception occurred:

ValueError                                Traceback (most recent call last)
Cell In[3], line 1
----> 1 submitted = client.submit(cs.nchuber, 1.0, 4.0)
      2 submitted.result()

File /srv/conda/envs/notebook/lib/python3.10/site-packages/distributed/client.py:1883, in Client.submit(self, func, key, workers, resources, retries, priority, fifo_timeout, allow_other_workers, actor, actors, pure, *args, **kwargs)
   1880 else:
   1881     dsk = {skey: (func,) + tuple(args)}
-> 1883 futures = self._graph_to_futures(
   1884     dsk,
   1885     [skey],
   1886     workers=workers,
   1887     allow_other_workers=allow_other_workers,
   1888     priority={skey: 0},
   1889     user_priority=priority,
   1890     resources=resources,
   1891     retries=retries,
   1892     fifo_timeout=fifo_timeout,
   1893     actors=actor,
   1894 )
   1896 logger.debug("Submit %s(...), %s", funcname(func), key)
   1898 return futures[skey]

File /srv/conda/envs/notebook/lib/python3.10/site-packages/distributed/client.py:3010, in Client._graph_to_futures(self, dsk, keys, workers, allow_other_workers, priority, user_priority, resources, retries, fifo_timeout, actors)
   3008 # Pack the high level graph before sending it to the scheduler
   3009 keyset = set(keys)
-> 3010 dsk = dsk.__dask_distributed_pack__(self, keyset, annotations)
   3012 # Create futures before sending graph (helps avoid contention)
   3013 futures = {key: Future(key, self, inform=False) for key in keyset}

File /srv/conda/envs/notebook/lib/python3.10/site-packages/dask/highlevelgraph.py:1078, in HighLevelGraph.__dask_distributed_pack__(self, client, client_keys, annotations)
   1072 layers = []
   1073 for layer in (self.layers[name] for name in self._toposort_layers()):
   1074     layers.append(
   1075         {
   1076             "__module__": layer.__module__,
   1077             "__name__": type(layer).__name__,
-> 1078             "state": layer.__dask_distributed_pack__(
   1079                 self.get_all_external_keys(),
   1080                 self.key_dependencies,
   1081                 client,
   1082                 client_keys,
   1083             ),
   1084             "annotations": layer.__dask_distributed_annotations_pack__(
   1085                 annotations
   1086             ),
   1087         }
   1088     )
   1089 return {"layers": layers}

File /srv/conda/envs/notebook/lib/python3.10/site-packages/dask/highlevelgraph.py:432, in Layer.__dask_distributed_pack__(self, all_hlg_keys, known_key_dependencies, client, client_keys)
    427 merged_hlg_keys = all_hlg_keys | dsk.keys()
    428 dsk = {
    429     stringify(k): stringify(v, exclusive=merged_hlg_keys)
    430     for k, v in dsk.items()
    431 }
--> 432 dsk = toolz.valmap(dumps_task, dsk)
    433 return {"dsk": dsk, "dependencies": dependencies}

File /srv/conda/envs/notebook/lib/python3.10/site-packages/cytoolz/dicttoolz.pyx:178, in cytoolz.dicttoolz.valmap()

File /srv/conda/envs/notebook/lib/python3.10/site-packages/cytoolz/dicttoolz.pyx:203, in cytoolz.dicttoolz.valmap()

File /srv/conda/envs/notebook/lib/python3.10/site-packages/distributed/worker.py:2957, in dumps_task(task)
   2955         return d
   2956     elif not any(map(_maybe_complex, task[1:])):
-> 2957         return {"function": dumps_function(task[0]), "args": warn_dumps(task[1:])}
   2958 return to_serialize(task)

File /srv/conda/envs/notebook/lib/python3.10/site-packages/distributed/worker.py:2921, in dumps_function(func)
   2919         result = cache_dumps[func]
   2920 except KeyError:
-> 2921     result = pickle.dumps(func)
   2922     if len(result) < 100000:
   2923         with _cache_lock:

File /srv/conda/envs/notebook/lib/python3.10/site-packages/distributed/protocol/pickle.py:58, in dumps(x, buffer_callback, protocol)
     56 try:
     57     buffers.clear()
---> 58     result = cloudpickle.dumps(x, **dump_kwargs)
     59 except Exception as e:
     60     logger.info("Failed to serialize %s. Exception: %s", x, e)

File /srv/conda/envs/notebook/lib/python3.10/site-packages/cloudpickle/cloudpickle_fast.py:73, in dumps(obj, protocol, buffer_callback)
     69 with io.BytesIO() as file:
     70     cp = CloudPickler(
     71         file, protocol=protocol, buffer_callback=buffer_callback
     72     )
---> 73     cp.dump(obj)
     74     return file.getvalue()

File /srv/conda/envs/notebook/lib/python3.10/site-packages/cloudpickle/cloudpickle_fast.py:632, in CloudPickler.dump(self, obj)
    630 def dump(self, obj):
    631     try:
--> 632         return Pickler.dump(self, obj)
    633     except RuntimeError as e:
    634         if "recursion" in e.args[0]:

File /srv/conda/envs/notebook/lib/python3.10/site-packages/numba/core/serialize.py:106, in _pickle__CustomPickled(cp)
    101 def _pickle__CustomPickled(cp):
    102     """standard pickling for `_CustomPickled`.
    103 
    104     Uses `NumbaPickler` to dump.
    105     """
--> 106     serialized = dumps((cp.ctor, cp.states))
    107     return _unpickle__CustomPickled, (serialized,)

File /srv/conda/envs/notebook/lib/python3.10/site-packages/numba/core/serialize.py:57, in dumps(obj)
     55 with io.BytesIO() as buf:
     56     p = pickler(buf, protocol=4)
---> 57     p.dump(obj)
     58     pickled = buf.getvalue()
     60 return pickled

File /srv/conda/envs/notebook/lib/python3.10/site-packages/numba/cloudpickle/cloudpickle_fast.py:568, in CloudPickler.dump(self, obj)
    566 def dump(self, obj):
    567     try:
--> 568         return Pickler.dump(self, obj)
    569     except RuntimeError as e:
    570         if "recursion" in e.args[0]:

ValueError: ctypes objects containing pointers cannot be pickled