
RapidsAI is broken #37

Open
yug0slav opened this issue Aug 5, 2020 · 0 comments
Labels: bug (Something isn't working)


yug0slav commented Aug 5, 2020

Describe the bug
RapidsAI isn't working in ai-lab:20.03 or 20.06: import cudf raises a TypeError.

To Reproduce
import cudf

Expected behavior
import cudf completes without error.

Screenshots

import cudf

---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-1-e13365c50bc4> in <module>
----> 1 import cudf

/opt/conda/lib/python3.6/site-packages/cudf/__init__.py in <module>
      3 import cupy
      4 
----> 5 import rmm
      6 
      7 from cudf import core, datasets

/opt/conda/lib/python3.6/site-packages/rmm/__init__.py in <module>
     15 import weakref
     16 
---> 17 from rmm.rmm import (
     18     RMMError,
     19     _finalize,

/opt/conda/lib/python3.6/site-packages/rmm/rmm.py in <module>
     17 
     18 import numpy as np
---> 19 from numba import cuda
     20 
     21 import rmm._lib as librmm

/opt/conda/lib/python3.6/site-packages/numba/__init__.py in <module>
    296 
    297 # Initialize typed containers
--> 298 import numba.typed

/opt/conda/lib/python3.6/site-packages/numba/typed/__init__.py in <module>
----> 1 from .typeddict import Dict
      2 from .typedlist import List

/opt/conda/lib/python3.6/site-packages/numba/typed/typeddict.py in <module>
     20 
     21 
---> 22 @njit
     23 def _make_dict(keyty, valty):
     24     return dictobject._as_meminfo(dictobject.new_dict(keyty, valty))

/opt/conda/lib/python3.6/site-packages/numba/core/decorators.py in njit(*args, **kws)
    234         warnings.warn('forceobj is set for njit and is ignored', RuntimeWarning)
    235     kws.update({'nopython': True})
--> 236     return jit(*args, **kws)
    237 
    238 

/opt/conda/lib/python3.6/site-packages/numba/core/decorators.py in jit(signature_or_function, locals, target, cache, pipeline_class, boundscheck, **options)
    171                    targetoptions=options, **dispatcher_args)
    172     if pyfunc is not None:
--> 173         return wrapper(pyfunc)
    174     else:
    175         return wrapper

/opt/conda/lib/python3.6/site-packages/numba/core/decorators.py in wrapper(func)
    187         disp = dispatcher(py_func=func, locals=locals,
    188                           targetoptions=targetoptions,
--> 189                           **dispatcher_args)
    190         if cache:
    191             disp.enable_caching()

/opt/conda/lib/python3.6/site-packages/numba/core/dispatcher.py in __init__(self, py_func, locals, targetoptions, impl_kind, pipeline_class)
    668         """
    669         self.typingctx = self.targetdescr.typing_context
--> 670         self.targetctx = self.targetdescr.target_context
    671 
    672         pysig = utils.pysignature(py_func)

/opt/conda/lib/python3.6/site-packages/numba/core/registry.py in target_context(self)
     45             return nested
     46         else:
---> 47             return self._toplevel_target_context
     48 
     49     @property

/opt/conda/lib/python3.6/site-packages/numba/core/utils.py in __get__(self, instance, type)
    329         if instance is None:
    330             return self
--> 331         res = instance.__dict__[self.name] = self.func(instance)
    332         return res
    333 

/opt/conda/lib/python3.6/site-packages/numba/core/registry.py in _toplevel_target_context(self)
     29     def _toplevel_target_context(self):
     30         # Lazily-initialized top-level target context, for all threads
---> 31         return cpu.CPUContext(self.typing_context)
     32 
     33     @utils.cached_property

/opt/conda/lib/python3.6/site-packages/numba/core/base.py in __init__(self, typing_context)
    257 
    258         # Initialize
--> 259         self.init()
    260 
    261     def init(self):

/opt/conda/lib/python3.6/site-packages/numba/core/compiler_lock.py in _acquire_compile_lock(*args, **kwargs)
     30         def _acquire_compile_lock(*args, **kwargs):
     31             with self:
---> 32                 return func(*args, **kwargs)
     33         return _acquire_compile_lock
     34 

/opt/conda/lib/python3.6/site-packages/numba/core/cpu.py in init(self)
     45     def init(self):
     46         self.is32bit = (utils.MACHINE_BITS == 32)
---> 47         self._internal_codegen = codegen.JITCPUCodegen("numba.exec")
     48 
     49         # Add ARM ABI functions from libgcc_s

/opt/conda/lib/python3.6/site-packages/numba/core/codegen.py in __init__(self, module_name)
    643         self._llvm_module.name = "global_codegen_module"
    644         self._rtlinker = RuntimeLinker()
--> 645         self._init(self._llvm_module)
    646 
    647     def _init(self, llvm_module):

/opt/conda/lib/python3.6/site-packages/numba/core/codegen.py in _init(self, llvm_module)
    652         self._tm_features = self._customize_tm_features()
    653         self._customize_tm_options(tm_options)
--> 654         tm = target.create_target_machine(**tm_options)
    655         engine = ll.create_mcjit_compiler(llvm_module, tm)
    656 

TypeError: create_target_machine() got an unexpected keyword argument 'jitdebug'
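
The traceback ends inside llvmlite (numba's codegen calls target.create_target_machine), which typically points to incompatible numba and llvmlite versions installed in the container rather than a problem in cudf itself. As a diagnostic sketch, assuming such a mismatch is the cause (not confirmed here), the installed versions can be read from package metadata without importing numba, which would otherwise fail with the same error:

# Diagnostic sketch (assumption): the 'jitdebug' TypeError usually indicates
# that the installed numba and llvmlite packages are incompatible versions.
# pkg_resources reads package metadata without importing numba itself.
import pkg_resources

for pkg in ("numba", "llvmlite", "cudf", "rmm"):
    try:
        print(pkg, pkg_resources.get_distribution(pkg).version)
    except pkg_resources.DistributionNotFound:
        print(pkg, "not installed")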

Client

  • OS: CentOS (Docker)
  • Browser: Chrome

Container Version
20.03 and 20.06


yug0slav added the bug label on Aug 5, 2020