Instructions to use zai-org/codegeex2-6b with libraries, inference providers, notebooks, and local apps. Follow these links to get started.
- Libraries
- Transformers
How to use zai-org/codegeex2-6b with Transformers:
```python
# Load model directly
from transformers import AutoModel

model = AutoModel.from_pretrained(
    "zai-org/codegeex2-6b", trust_remote_code=True, dtype="auto"
)
```
- Notebooks
- Google Colab
- Kaggle
Add support for Ascend NPU
#7
by statelesshz - opened
- modeling_chatglm.py +1 -1
modeling_chatglm.py
CHANGED
```diff
@@ -26,7 +26,7 @@ from .configuration_chatglm import ChatGLMConfig

 # flags required to enable jit fusion kernels
-if sys.platform != 'darwin':
+if sys.platform != 'darwin' and torch.cuda.is_available():
     torch._C._jit_set_profiling_mode(False)
     torch._C._jit_set_profiling_executor(False)
     torch._C._jit_override_can_fuse_on_cpu(True)
```